-rw-r--r--fonts/fea/context/greek-babel.fea (renamed from fonts/fea/context/greek-babel-extended.fea)161
-rw-r--r--fonts/fea/context/test-features.fea20
-rw-r--r--fonts/fea/context/texhistoric.fea42
-rw-r--r--fonts/fea/context/verbose-digits.fea20
-rw-r--r--metapost/context/base/metafun.mp14
-rw-r--r--metapost/context/base/mp-core.mp2
-rw-r--r--metapost/context/base/mp-spec.mp35
-rw-r--r--scripts/context/lua/luatools.lua865
-rw-r--r--scripts/context/lua/mtx-babel.lua150
-rw-r--r--scripts/context/lua/mtx-cache.lua8
-rw-r--r--scripts/context/lua/mtx-chars.lua8
-rw-r--r--scripts/context/lua/mtx-context.lua27
-rw-r--r--scripts/context/lua/mtx-convert.lua86
-rw-r--r--scripts/context/lua/mtx-fonts.lua9
-rw-r--r--scripts/context/lua/mtx-watch.lua224
-rw-r--r--scripts/context/lua/mtxrun.lua1292
-rw-r--r--scripts/context/lua/scite-ctx.lua924
-rw-r--r--scripts/context/ruby/base/kpse.rb8
-rw-r--r--scripts/context/ruby/base/tex.rb29
-rw-r--r--scripts/context/ruby/base/texutil.rb4
-rw-r--r--scripts/context/ruby/graphics/gs.rb6
-rw-r--r--scripts/context/ruby/texexec.rb15
-rw-r--r--scripts/context/ruby/www/exa.rb1
-rw-r--r--scripts/context/ruby/www/lib.rb8
-rw-r--r--tex/context/base/attr-ini.lua301
-rw-r--r--tex/context/base/attr-ini.tex48
-rw-r--r--tex/context/base/char-def.tex2
-rw-r--r--tex/context/base/char-ini.lua28
-rw-r--r--tex/context/base/char-syn.lua140
-rw-r--r--tex/context/base/char-utf.lua176
-rw-r--r--tex/context/base/colo-new.lua49
-rw-r--r--tex/context/base/colo-new.mkii6
-rw-r--r--tex/context/base/colo-new.mkiv7
-rw-r--r--tex/context/base/cont-new.mkiv84
-rw-r--r--tex/context/base/cont-new.tex20
-rw-r--r--tex/context/base/context.tex5
-rw-r--r--tex/context/base/core-buf.lua6
-rw-r--r--tex/context/base/core-des.tex4
-rw-r--r--tex/context/base/core-fig.tex10
-rw-r--r--tex/context/base/core-itm.tex42
-rw-r--r--tex/context/base/core-lst.tex55
-rw-r--r--tex/context/base/core-mis.tex10
-rw-r--r--tex/context/base/core-new.tex20
-rw-r--r--tex/context/base/core-ntb.tex13
-rw-r--r--tex/context/base/core-pgr.tex99
-rw-r--r--tex/context/base/core-pos.tex22
-rw-r--r--tex/context/base/core-reg.mkiv6
-rw-r--r--tex/context/base/core-spa.lua290
-rw-r--r--tex/context/base/core-spa.mkiv37
-rw-r--r--tex/context/base/core-spa.tex147
-rw-r--r--tex/context/base/core-syn.lua2
-rw-r--r--tex/context/base/core-syn.mkiv4
-rw-r--r--tex/context/base/core-tbl.tex67
-rw-r--r--tex/context/base/core-two.mkiv4
-rw-r--r--tex/context/base/core-uti.mkiv7
-rw-r--r--tex/context/base/core-uti.tex14
-rw-r--r--tex/context/base/enco-ini.mkiv4
-rw-r--r--tex/context/base/enco-ini.tex3
-rw-r--r--tex/context/base/font-afm.lua160
-rw-r--r--tex/context/base/font-def.lua125
-rw-r--r--tex/context/base/font-enc.lua17
-rw-r--r--tex/context/base/font-fbk.lua15
-rw-r--r--tex/context/base/font-ini.lua6
-rw-r--r--tex/context/base/font-ini.mkii31
-rw-r--r--tex/context/base/font-ini.mkiv44
-rw-r--r--tex/context/base/font-ini.tex61
-rw-r--r--tex/context/base/font-otf.lua1415
-rw-r--r--tex/context/base/font-syn.lua2
-rw-r--r--tex/context/base/font-tfm.lua131
-rw-r--r--tex/context/base/font-vf.lua44
-rw-r--r--tex/context/base/l-aux.lua67
-rw-r--r--tex/context/base/l-boolean.lua7
-rw-r--r--tex/context/base/l-dir.lua79
-rw-r--r--tex/context/base/l-io.lua141
-rw-r--r--tex/context/base/l-lpeg.lua41
-rw-r--r--tex/context/base/l-md5.lua2
-rw-r--r--tex/context/base/l-os.lua32
-rw-r--r--tex/context/base/l-table.lua126
-rw-r--r--tex/context/base/l-url.lua77
-rw-r--r--tex/context/base/l-xml.lua207
-rw-r--r--tex/context/base/lang-ini.lua520
-rw-r--r--tex/context/base/lang-ini.mkii133
-rw-r--r--tex/context/base/lang-ini.mkiv63
-rw-r--r--tex/context/base/lang-ini.tex398
-rw-r--r--tex/context/base/lang-sla.tex2
-rw-r--r--tex/context/base/luat-cbk.lua3
-rw-r--r--tex/context/base/luat-inp.lua549
-rw-r--r--tex/context/base/luat-lib.tex2
-rw-r--r--tex/context/base/luat-lmx.lua12
-rw-r--r--tex/context/base/luat-log.lua7
-rw-r--r--tex/context/base/luat-tex.lua117
-rw-r--r--tex/context/base/luat-tmp.lua27
-rw-r--r--tex/context/base/luat-tra.lua18
-rw-r--r--tex/context/base/luat-zip.lua4
-rw-r--r--tex/context/base/lxml-ini.lua58
-rw-r--r--tex/context/base/lxml-ini.tex93
-rw-r--r--tex/context/base/math-ext.tex40
-rw-r--r--tex/context/base/meta-pdf.lua49
-rw-r--r--tex/context/base/meta-pdf.mkii40
-rw-r--r--tex/context/base/meta-pdf.mkiv14
-rw-r--r--tex/context/base/meta-pdf.tex36
-rw-r--r--tex/context/base/mult-con.tex8
-rw-r--r--tex/context/base/mult-sys.tex1
-rw-r--r--tex/context/base/node-ini.lua931
-rw-r--r--tex/context/base/page-flt.tex24
-rw-r--r--tex/context/base/page-ini.tex1
-rw-r--r--tex/context/base/page-lin.lua232
-rw-r--r--tex/context/base/page-lin.mkii (renamed from tex/context/base/page-lin.tex)151
-rw-r--r--tex/context/base/page-lin.mkiv424
-rw-r--r--tex/context/base/page-mul.tex2
-rw-r--r--tex/context/base/page-par.tex58
-rw-r--r--tex/context/base/regi-ini.lua13
-rw-r--r--tex/context/base/regi-ini.mkii9
-rw-r--r--tex/context/base/s-abr-01.tex4
-rw-r--r--tex/context/base/sort-ini.mkii4
-rw-r--r--tex/context/base/spec-tpd.tex2
-rw-r--r--tex/context/base/supp-pdf.tex123
-rw-r--r--tex/context/base/syst-con.lua13
-rw-r--r--tex/context/base/syst-etx.tex2
-rw-r--r--tex/context/base/syst-mtx.tex2
-rw-r--r--tex/context/base/syst-omg.tex2
-rw-r--r--tex/context/base/thrd-trg.tex54
-rw-r--r--tex/context/base/type-enc.tex1
-rw-r--r--tex/context/base/type-one.tex15
-rw-r--r--tex/context/base/type-otf.tex15
-rw-r--r--tex/context/base/type-tmf.tex23
-rw-r--r--tex/context/base/type-xtx.tex146
-rw-r--r--tex/context/base/unic-ini.tex11
-rw-r--r--tex/context/base/x-cml.mkiv10
-rw-r--r--tex/context/base/x-fo.tex2
-rw-r--r--tex/context/base/x-mml.mkiv3
-rw-r--r--tex/context/base/x-newmml.tex2
-rw-r--r--tex/context/base/xtag-exp.tex6
-rw-r--r--tex/context/interface/cont-cz.xml7
-rw-r--r--tex/context/interface/cont-de.xml7
-rw-r--r--tex/context/interface/cont-en.xml7
-rw-r--r--tex/context/interface/cont-fr.xml7
-rw-r--r--tex/context/interface/cont-it.xml7
-rw-r--r--tex/context/interface/cont-nl.xml7
-rw-r--r--tex/context/interface/cont-ro.xml7
-rw-r--r--tex/context/interface/keys-cz.xml4
-rw-r--r--tex/context/interface/keys-de.xml4
-rw-r--r--tex/context/interface/keys-en.xml4
-rw-r--r--tex/context/interface/keys-fr.xml4
-rw-r--r--tex/context/interface/keys-it.xml4
-rw-r--r--tex/context/interface/keys-nl.xml4
-rw-r--r--tex/context/interface/keys-ro.xml4
-rw-r--r--tex/context/sample/sample.tex5
-rw-r--r--tex/context/sample/weisman.tex5
-rw-r--r--tex/context/test/x-cml-test.xml7
-rw-r--r--tex/generic/context/mptopdf.tex29
151 files changed, 8229 insertions, 4893 deletions
diff --git a/fonts/fea/context/greek-babel-extended.fea b/fonts/fea/context/greek-babel.fea
index 69d8b5d76..68f5cd145 100644
--- a/fonts/fea/context/greek-babel-extended.fea
+++ b/fonts/fea/context/greek-babel.fea
@@ -1,66 +1,22 @@
+# This file has been written by Arthur Reutenauer.
# An Opentype feature to replace the Babel input scheme
-# Not quite complete; some rhos with breathings and accents are missing (where
-# are they?) and the final sigma isn't accounted for.
-
-lookup GreekBabelLookupSimple {
- lookupflag 0 ;
- sub a by alpha ;
- sub b by beta ;
- sub g by gamma ;
- sub d by delta ;
- sub e by epsilon ;
- sub z by zeta ;
- sub h by eta ;
- sub j by theta ;
- sub i by iota ;
- sub k by kappa ;
- sub l by lambda ;
- sub m by mu ;
- sub n by nu ;
- sub x by xi ;
- sub o by omicron ;
- sub p by pi ;
- sub r by rho ;
- sub c by sigmafinal ;
- sub s by sigma ;
- sub t by tau ;
- sub u by upsilon ;
- sub f by phi ;
- sub q by chi ;
- sub y by psi ;
- sub w by omega ;
- sub A by Alpha ;
- sub B by Beta ;
- sub G by Gamma ;
- sub D by Delta ;
- sub E by Epsilon ;
- sub Z by Zeta ;
- sub H by Eta ;
- sub J by Theta ;
- sub I by Iota ;
- sub K by Kappa ;
- sub L by Lambda ;
- sub M by Mu ;
- sub N by Nu ;
- sub X by Xi ;
- sub O by Omicron ;
- sub P by Pi ;
- sub R by Rho ;
- sub C by Uni03C2 ;
- sub S by Sigma ;
- sub T by Tau ;
- sub U by Upsilon ;
- sub F by Phi ;
- sub Q by Chi ;
- sub Y by Psi ;
- sub W by Omega ;
- sub semicolon by periodcentered ;
-} GreekBabelLookupSimple ;
-
lookup GreekBabelLookupMultiple {
lookupflag 1 ;
- # sub s 'space by sigmafinal ;
+ sub quotedbl quotesingle i by uni1FD3 ;
+ sub quotedbl quotesingle u by uni1FE3 ;
+ sub quotedbl grave i by uni1FD2 ;
+ sub quotedbl grave u by uni1FE2 ;
+ sub quotedbl asciitilde i by uni1FD7 ;
+ sub quotedbl asciitilde u by uni1FE7 ;
+ sub quotedbl i by uni03CA ;
+ sub quotedbl u by uni03CB ;
+ sub equal a by uni1FB1 ;
+ sub equal i by uni1FD1 ;
+ sub equal u by uni1FE1 ;
+ sub equal quotesingle a by uniEB00 ;
+ sub equal quotesingle i by uniEB39 ;
+ sub equal u quotesingle by uniEB7A ;
sub greater a by uni1F00 ;
sub greater A by uni1F08 ;
sub greater e by uni1F10 ;
@@ -72,7 +28,6 @@ lookup GreekBabelLookupMultiple {
sub greater o by uni1F40 ;
sub greater O by uni1F48 ;
sub greater u by uni1F50 ;
- # sub greater U by uni1F58 ;
sub greater w by uni1F60 ;
sub greater W by uni1F68 ;
sub greater grave a by uni1F02 ;
@@ -86,7 +41,6 @@ lookup GreekBabelLookupMultiple {
sub greater grave o by uni1F42 ;
sub greater grave O by uni1F4A ;
sub greater grave u by uni1F52 ;
- # sub greater grave U by uni1F5A ;
sub greater grave w by uni1F62 ;
sub greater grave W by uni1F6A ;
sub greater quotesingle a by uni1F04 ;
@@ -248,28 +202,82 @@ lookup GreekBabelLookupMultiple {
sub less asciitilde w bar by uni1FA7 ;
sub less asciitilde W bar by uni1FAF ;
sub grave a bar by uni1FB2 ;
+ sub a bar by uni1FB3 ;
sub quotesingle a bar by uni1FB4 ;
+ sub h bar by uni1FC3 ;
sub grave h bar by uni1FC2 ;
sub quotesingle h bar by uni1FC4 ;
- sub grave w bar by uni1FD2 ;
- sub quotesingle w bar by uni1FD4 ;
+ sub grave w bar by uni1FF2 ;
+ sub w bar by uni1FF3 ;
+ sub quotesingle w bar by uni1FF4 ;
sub asciitilde a by uni1FB6 ;
sub asciitilde a bar by uni1FB7 ;
sub asciitilde h by uni1FC6 ;
sub asciitilde h bar by uni1FC7 ;
- sub asciitilde w by uni1FD6 ;
- sub asciitilde w bar by uni1FD7 ;
+ sub asciitilde i by uni1FD6 ;
+ sub asciitilde u by uni1FE6 ;
+ sub asciitilde w by uni1FF6 ;
+ sub asciitilde w bar by uni1FF7 ;
sub greater r by uni1FE4 ;
sub less r by uni1FE5 ;
sub less R by uni1FEC ;
} GreekBabelLookupMultiple ;
-lookup GreekBabel2LookupMultiple {
- lookupflag 1 ;
- sub alpha bar by uni1FB3 ;
- sub eta bar by uni1FC3 ;
- sub omega bar by uni1FF3 ;
-} GreekBabel2LookupMultiple ;
+lookup GreekBabelLookupSimple {
+ lookupflag 0 ;
+ sub a by alpha ;
+ sub b by beta ;
+ sub g by gamma ;
+ sub d by delta ;
+ sub e by epsilon ;
+ sub z by zeta ;
+ sub h by eta ;
+ sub j by theta ;
+ sub i by iota ;
+ sub k by kappa ;
+ sub l by lambda ;
+ sub m by mu ;
+ sub n by nu ;
+ sub x by xi ;
+ sub o by omicron ;
+ sub p by pi ;
+ sub r by rho ;
+ sub c by uni03F2 ;
+ sub s by uni03F2 ;
+ sub t by tau ;
+ sub u by upsilon ;
+ sub f by phi ;
+ sub q by chi ;
+ sub y by psi ;
+ sub w by omega ;
+ sub A by Alpha ;
+ sub B by Beta ;
+ sub G by Gamma ;
+ sub D by Delta ;
+ sub E by Epsilon ;
+ sub Z by Zeta ;
+ sub H by Eta ;
+ sub J by Theta ;
+ sub I by Iota ;
+ sub K by Kappa ;
+ sub L by Lambda ;
+ sub M by Mu ;
+ sub N by Nu ;
+ sub X by Xi ;
+ sub O by Omicron ;
+ sub P by Pi ;
+ sub R by Rho ;
+ sub C by Uni03C2 ;
+ sub S by uni03F9 ;
+ sub T by Tau ;
+ sub U by Upsilon ;
+ sub F by Phi ;
+ sub Q by Chi ;
+ sub Y by Psi ;
+ sub W by Omega ;
+ sub semicolon by anoteleia ;
+ sub exclam by dotbelowcomb ;
+} GreekBabelLookupSimple ;
feature grbl {
@@ -284,14 +292,3 @@ feature grbl {
lookup GreekBabelLookupSimple ;
} grbl ;
-feature grb2 {
-
- script DFLT ;
- language dflt ;
- lookup GreekBabel2LookupMultiple ;
-
- script latn;
- language dflt ;
- lookup GreekBabel2LookupMultiple ;
-} grb2 ;
-
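The grbl feature above maps Babel's Latin transliteration to Greek glyphs at the font level. Purely as an illustration, not part of the patch, the same single-letter mapping could be sketched in Lua with a gsub table (abbreviated to a few letters, names hypothetical):

    local babel_to_greek = {
        a = "α", b = "β", g = "γ", d = "δ", e = "ε",
        A = "Α", B = "Β", G = "Γ", D = "Δ", E = "Ε",
    }
    local function transliterate(str)
        -- letters missing from the table are kept as-is by gsub
        return (str:gsub("%a", babel_to_greek))
    end
    -- transliterate("abgde") --> "αβγδε"

The feature file does the same thing inside the font, so the source document can stay in the Babel ASCII notation.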
diff --git a/fonts/fea/context/test-features.fea b/fonts/fea/context/test-features.fea
new file mode 100644
index 000000000..ffc3c588d
--- /dev/null
+++ b/fonts/fea/context/test-features.fea
@@ -0,0 +1,20 @@
+lookup TestLookupENGLISH {
+ lookupflag 0 ;
+ sub l a n g u a g e by e n g l i s h ;
+} TestLookupENGLISH ;
+
+lookup TestLookupDUTCH {
+ lookupflag 0 ;
+ sub l a n g u a g e by d u t c h ;
+} TestLookupDUTCH ;
+
+feature test {
+
+ script latn ;
+ language ENG exclude_dflt ;
+ lookup TestLookupENGLISH ;
+ language NLD exclude_dflt ;
+ lookup TestLookupDUTCH ;
+
+} test ;
+
diff --git a/fonts/fea/context/texhistoric.fea b/fonts/fea/context/texhistoric.fea
new file mode 100644
index 000000000..453ea0188
--- /dev/null
+++ b/fonts/fea/context/texhistoric.fea
@@ -0,0 +1,42 @@
+# The first MkIV OpenType Handler used internal methods, but
+# after we implemented other things it made more sense to
+# do the following. (HH)
+
+lookup TeXPseudoLigaturesLookup {
+ lookupflag 1 ;
+ sub hyphen hyphen hyphen by emdash ;
+ sub hyphen hyphen by endash ;
+ sub hyphen endash by emdash ;
+ sub endash hyphen by emdash ;
+ sub quoteleft quoteleft by quotedblleft ;
+ sub quoteright quoteright by quotedblright ;
+ sub grave grave by quotedblleft ;
+ sub quotesingle quotesingle by quotedblright ;
+ sub comma comma by quotedblbase ;
+} TeXPseudoLigaturesLookup ;
+
+lookup TeXQuoteReplacementLookup {
+ lookupflag 1 ;
+ sub quotedbl by quotedblright ;
+ sub quotesingle by quoteright ;
+ sub grave by quoteleft ;
+} TeXQuoteReplacementLookup ;
+
+feature tlig {
+ script DFLT ;
+ language dflt ;
+ lookup TeXPseudoLigaturesLookup ;
+ script latn;
+ language dflt ;
+ lookup TeXPseudoLigaturesLookup ;
+} tlig ;
+
+feature trep {
+ script DFLT ;
+ language dflt ;
+ lookup TeXQuoteReplacementLookup ;
+ script latn;
+ language dflt ;
+ lookup TeXQuoteReplacementLookup ;
+} trep ;
+
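The tlig and trep features above reproduce TeX's traditional input ligatures and quote replacement in OpenType. As a plain-text illustration only (not part of the patch, and just a rough sketch: order matters, three hyphens must be handled before two):

    local function texhistoric(str)
        str = str:gsub("%-%-%-", "—")  -- --- to em dash
        str = str:gsub("%-%-",   "–")  -- --  to en dash
        str = str:gsub("``",     "“")  -- `` to left double quote
        str = str:gsub("''",     "”")  -- '' to right double quote
        str = str:gsub(",,",     "„")  -- ,, to low double quote
        return str
    end
    -- texhistoric("pages 1--2 --- ``done''") --> "pages 1–2 — “done”"

Doing it in the font keeps the input untouched, which is the point of these features.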
diff --git a/fonts/fea/context/verbose-digits.fea b/fonts/fea/context/verbose-digits.fea
index 7fd1fcf85..866734853 100644
--- a/fonts/fea/context/verbose-digits.fea
+++ b/fonts/fea/context/verbose-digits.fea
@@ -1,15 +1,15 @@
lookup NumericLookupDFLT {
lookupflag 0 ;
- sub zero by z e r o ;
- sub one by o n e ;
- sub two by t w o ;
- sub three by t h r e e ;
- sub four by f o u r ;
- sub five by f i v e ;
- sub six by s i x ;
- sub seven by s e v e n ;
- sub eight by e i g h t ;
- sub nine by n i n e ;
+ sub zero by z e r o ;
+ sub one by o n e ;
+ sub two by t w o ;
+ sub three by t h r e e ;
+ sub four by f o u r ;
+ sub five by f i v e ;
+ sub six by s i x ;
+ sub seven by s e v e n ;
+ sub eight by e i g h t ;
+ sub nine by n i n e ;
} NumericLookupDFLT ;
feature verb {
diff --git a/metapost/context/base/metafun.mp b/metapost/context/base/metafun.mp
index cfbf7b2f9..98ea1980f 100644
--- a/metapost/context/base/metafun.mp
+++ b/metapost/context/base/metafun.mp
@@ -53,12 +53,14 @@ input mp-func.mp ;
string metafunversion ;
-metafunversion = "metafun" & " " &
- decimal year & "-" &
- decimal month & "-" &
- decimal day & " " &
- decimal (time div 60) & ":" &
- decimal (time-(time div 60)*60) ;
+metafunversion = "metafun" & " " &
+ decimal year & "-" &
+ decimal month & "-" &
+ decimal day & " " &
+ if ((time div 60) < 10) : "0" & fi
+ decimal (time div 60) & ":" &
+ if ((time-(time div 60)*60) < 10) : "0" & fi
+ decimal (time-(time div 60)*60) ;
let normalend = end ;
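The metafun.mp change above zero-pads the hour and minute fields of the version string, so for instance nine past eight renders as 08:09 instead of 8:9. For comparison only (not part of the patch), the equivalent in Lua is a single format call:

    local time  = 489 -- like MetaPost's time: minutes since midnight
    local stamp = string.format("%02d:%02d", math.floor(time/60), time % 60)
    -- stamp --> "08:09"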
diff --git a/metapost/context/base/mp-core.mp b/metapost/context/base/mp-core.mp
index c39aa406f..7967b718f 100644
--- a/metapost/context/base/mp-core.mp
+++ b/metapost/context/base/mp-core.mp
@@ -1000,6 +1000,7 @@ color boxfillcolor ; boxfillcolor := .8white ;
numeric boxgridtype ; boxgridtype := 0 ;
numeric boxlinetype ; boxlinetype := 1 ;
numeric boxfilltype ; boxfilltype := 1 ;
+numeric boxdashtype ; boxdashtype := 0 ;
pair boxgriddirection ; boxgriddirection := up ;
numeric boxgridwidth ; boxgridwidth := 1pt ;
numeric boxlinewidth ; boxlinewidth := 1pt ;
@@ -1166,6 +1167,7 @@ vardef baseline_grid (expr pxy, pdir, at_baseline) =
save i, grid, bb ; picture grid ; pair start ; path bb ;
def _do_ (expr start) =
draw start -- start shifted (bbwidth(pxy),0)
+ if boxdashtype = 1 : dashed evenly fi
withpen pencircle scaled boxgridwidth
withcolor boxgridcolor ;
enddef ;
diff --git a/metapost/context/base/mp-spec.mp b/metapost/context/base/mp-spec.mp
index 43d8f095c..d4c2b8cfc 100644
--- a/metapost/context/base/mp-spec.mp
+++ b/metapost/context/base/mp-spec.mp
@@ -230,14 +230,31 @@ enddef ;
vardef _is_spot_(expr c) =
(redpart c = _special_signal_/_special_div_) and (greenpart c = 2/_special_div_)
enddef ;
+vardef _is_gray_(expr c) =
+ (redpart c = greenpart c) and (greenpart c = bluepart c)
+enddef ;
numeric mp_shade_version ; mp_shade_version := 2 ; % more colors, needs new backend
vardef define_linear_shade (expr a, b, ca, cb) =
- if (mp_shade_version > 1) and _is_cmyk_(ca) and _is_cmyk_(cb) :
+ save cmyk_a, cmyk_b ; boolean cmyk_a, cmyk_b ;
+ save gray_a, gray_b ; boolean gray_a, gray_b ;
+ cmyk_a := _is_cmyk_(ca) ; gray_a := _is_gray_(ca) ;
+ cmyk_b := _is_cmyk_(cb) ; gray_b := _is_gray_(cb) ;
+ if (mp_shade_version > 1) and cmyk_a and cmyk_b :
flush_special(32, 17, "0 1 " & decimal shadefactor & " " &
cmykcolorpattern[bluepart ca] & " " & ddecimal (a shifted shadeoffset) & " " &
cmykcolorpattern[bluepart cb] & " " & ddecimal (b shifted shadeoffset) ) ;
+ elseif (mp_shade_version > 1) and cmyk_a and gray_b :
+ save cg ; color cg ; cg := cmyk(0,0,0,1-greenpart cb) ;
+ flush_special(32, 17, "0 1 " & decimal shadefactor & " " &
+ cmykcolorpattern[bluepart ca] & " " & ddecimal (a shifted shadeoffset) & " " &
+ cmykcolorpattern[bluepart cg] & " " & ddecimal (b shifted shadeoffset) ) ;
+ elseif (mp_shade_version > 1) and gray_a and cmyk_b :
+ save cg ; color cg ; cg := cmyk(0,0,0,1-greenpart ca) ;
+ flush_special(32, 17, "0 1 " & decimal shadefactor & " " &
+ cmykcolorpattern[bluepart cg] & " " & ddecimal (a shifted shadeoffset) & " " &
+ cmykcolorpattern[bluepart cb] & " " & ddecimal (b shifted shadeoffset) ) ;
elseif (mp_shade_version > 1) and _is_spot_(ca) and _is_spot_(cb) :
flush_special(34, 17, "0 1 " & decimal shadefactor & " " &
spotcolorpattern[bluepart ca] & " " & ddecimal (a shifted shadeoffset) & " " &
@@ -251,10 +268,24 @@ vardef define_linear_shade (expr a, b, ca, cb) =
enddef ;
vardef define_circular_shade (expr a, b, ra, rb, ca, cb) =
- if (mp_shade_version > 1) and _is_cmyk_(ca) and _is_cmyk_(cb) :
+ save cmyk_a, cmyk_b ; boolean cmyk_a, cmyk_b ;
+ save gray_a, gray_b ; boolean gray_a, gray_b ;
+ cmyk_a := _is_cmyk_(ca) ; gray_a := _is_gray_(ca) ;
+ cmyk_b := _is_cmyk_(cb) ; gray_b := _is_gray_(cb) ;
+ if (mp_shade_version > 1) and cmyk_a and cmyk_b :
flush_special(33, 19, "0 1 " & decimal shadefactor & " " &
cmykcolorpattern[bluepart ca] & " " & ddecimal (a shifted shadeoffset) & " " & decimal ra & " " &
cmykcolorpattern[bluepart cb] & " " & ddecimal (b shifted shadeoffset) & " " & decimal rb ) ;
+ elseif (mp_shade_version > 1) and cmyk_a and gray_b :
+ save cg ; color cg ; cg := cmyk(0,0,0,1-greenpart cb) ;
+ flush_special(33, 19, "0 1 " & decimal shadefactor & " " &
+ cmykcolorpattern[bluepart ca] & " " & ddecimal (a shifted shadeoffset) & " " & decimal ra & " " &
+ cmykcolorpattern[bluepart cg] & " " & ddecimal (b shifted shadeoffset) & " " & decimal rb ) ;
+ elseif (mp_shade_version > 1) and gray_a and cmyk_b :
+ save cg ; color cg ; cg := cmyk(0,0,0,1-greenpart ca) ;
+ flush_special(33, 19, "0 1 " & decimal shadefactor & " " &
+ cmykcolorpattern[bluepart cg] & " " & ddecimal (a shifted shadeoffset) & " " & decimal ra & " " &
+ cmykcolorpattern[bluepart cb] & " " & ddecimal (b shifted shadeoffset) & " " & decimal rb ) ;
elseif (mp_shade_version > 1) and _is_spot_(ca) and _is_spot_(cb) :
flush_special(35, 19, "0 1 " & decimal shadefactor & " " &
spotcolorpattern[bluepart ca] & " " & ddecimal (a shifted shadeoffset) & " " & decimal ra & " " &
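The new branches in define_linear_shade and define_circular_shade allow a shade between a CMYK color and a gray by promoting the gray component g to cmyk(0,0,0,1-g). A one-line Lua sketch of that conversion, for illustration only:

    -- hypothetical helper: promote a gray value (1 = white, 0 = black) to CMYK
    local function gray_to_cmyk(g)
        return 0, 0, 0, 1 - g  -- only the K channel carries the gray
    end
    -- gray_to_cmyk(0.8) --> 0, 0, 0, 0.2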
diff --git a/scripts/context/lua/luatools.lua b/scripts/context/lua/luatools.lua
index d53180cfa..1dc67519e 100644
--- a/scripts/context/lua/luatools.lua
+++ b/scripts/context/lua/luatools.lua
@@ -141,6 +141,10 @@ function string.piecewise(str, pat, fnc) -- variant of split
for k in string.splitter(str,pat) do fnc(k) end
end
+--~ function string.piecewise(str, pat, fnc) -- variant of split
+--~ for k in str:splitter(pat) do fnc(k) end
+--~ end
+
--~ do if lpeg then
--~ -- this alternative is 30% faster esp when we cache them
@@ -158,7 +162,7 @@ end
--~ split = lpeg.Ct(c*(p*c)^0)
--~ splitters[separator] = split
--~ end
---~ return lpeg.match(split,self)
+--~ return lpeg.match(split,self) -- split:match(self)
--~ else
--~ return { }
--~ end
@@ -315,7 +319,7 @@ end
--~ return self .. self.rep(chr or " ",n-#self)
--~ end
-function string:padd(n,chr)
+function string:rpadd(n,chr)
local m = n-#self
if m > 0 then
return self .. self.rep(chr or " ",m)
@@ -324,6 +328,17 @@ function string:padd(n,chr)
end
end
+function string:lpadd(n,chr)
+ local m = n-#self
+ if m > 0 then
+ return self.rep(chr or " ",m) .. self
+ else
+ return self
+ end
+end
+
+string.padd = string.rpadd
+
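string.padd is split into rpadd (pad on the right, the old behaviour) and lpadd (pad on the left); padd stays as an alias for rpadd. Expected results, assuming the merged luatools.lua is loaded:

    assert(("42"):lpadd(5,"0") == "00042")
    assert(("42"):rpadd(5,".") == "42...")
    assert(("42"):padd(5)      == "42   ") -- padd is an alias for rpadd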
function is_number(str)
return str:find("^[%-%+]?[%d]-%.?[%d+]$") == 1
end
@@ -349,6 +364,49 @@ function string:split_settings() -- no {} handling, see l-aux for lpeg variant
end
+-- filename : l-lpeg.lua
+-- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
+-- copyright: PRAGMA ADE / ConTeXt Development Team
+-- license : see context related readme files
+
+if not versions then versions = { } end versions['l-lpeg'] = 1.001
+
+--~ l-lpeg.lua :
+
+--~ lpeg.digit = lpeg.R('09')^1
+--~ lpeg.sign = lpeg.S('+-')^1
+--~ lpeg.cardinal = lpeg.P(lpeg.sign^0 * lpeg.digit^1)
+--~ lpeg.integer = lpeg.P(lpeg.sign^0 * lpeg.digit^1)
+--~ lpeg.float = lpeg.P(lpeg.sign^0 * lpeg.digit^0 * lpeg.P('.') * lpeg.digit^1)
+--~ lpeg.number = lpeg.float + lpeg.integer
+--~ lpeg.oct = lpeg.P("0") * lpeg.R('07')^1
+--~ lpeg.hex = lpeg.P("0x") * (lpeg.R('09') + lpeg.R('AF'))^1
+--~ lpeg.uppercase = lpeg.P("AZ")
+--~ lpeg.lowercase = lpeg.P("az")
+
+--~ lpeg.eol = lpeg.S('\r\n\f')^1 -- includes formfeed
+--~ lpeg.space = lpeg.S(' ')^1
+--~ lpeg.nonspace = lpeg.P(1-lpeg.space)^1
+--~ lpeg.whitespace = lpeg.S(' \r\n\f\t')^1
+--~ lpeg.nonwhitespace = lpeg.P(1-lpeg.whitespace)^1
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return lpeg.P { lpeg.P(pattern) + 1 * lpeg.V(1) }
+end
+
+function lpeg.startswith(pattern) --slightly adapted
+ return lpeg.P(pattern)
+end
+
+--~ g = lpeg.splitter(" ",function(s) ... end) -- gmatch:lpeg = 3:2
+
+function lpeg.splitter(pattern, action)
+ return (((1-lpeg.P(pattern))^1)/action+1)^0
+end
+
+
+
+
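A usage sketch for the lpeg helpers added above (lpeg is preloaded in LuaTeX): lpeg.splitter builds a pattern that feeds every piece between separators to a callback, and lpeg.anywhere succeeds as soon as the given pattern occurs somewhere in the subject.

    local parts    = { }
    local splitter = lpeg.splitter(",", function(s) parts[#parts+1] = s end)
    lpeg.match(splitter, "a,b,c")  -- parts is now { "a", "b", "c" }

    local weird = lpeg.anywhere(lpeg.S("~#$%"))
    -- lpeg.match(weird, "foo#bar") --> a position (truthy); nil when no such character occurs

This is how the rewritten input.generators.tex further down uses an anywhere pattern to skip filenames with troublesome characters.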
-- filename : l-table.lua
-- comment : split off from luat-lib
-- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
@@ -422,6 +480,7 @@ function table.merge(t, ...)
t[k] = v
end
end
+ return t
end
function table.merged(...)
@@ -434,6 +493,25 @@ function table.merged(...)
return tmp
end
+function table.imerge(t, ...)
+ for _, list in ipairs({...}) do
+ for k,v in ipairs(list) do
+ t[#t+1] = v
+ end
+ end
+ return t
+end
+
+function table.imerged(...)
+ local tmp = { }
+ for _, list in ipairs({...}) do
+ for _,v in pairs(list) do
+ tmp[#tmp+1] = v
+ end
+ end
+ return tmp
+end
+
if not table.fastcopy then
function table.fastcopy(old) -- fast one
@@ -441,11 +519,15 @@ if not table.fastcopy then
local new = { }
for k,v in pairs(old) do
if type(v) == "table" then
- new[k] = table.copy(v)
+ new[k] = table.fastcopy(v) -- was just table.copy
else
new[k] = v
end
end
+ local mt = getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
return new
else
return { }
@@ -456,30 +538,32 @@ end
if not table.copy then
- function table.copy(t, _lookup_table) -- taken from lua wiki
- _lookup_table = _lookup_table or { }
+ function table.copy(t, tables) -- taken from lua wiki, slightly adapted
+ tables = tables or { }
local tcopy = {}
- if not _lookup_table[t] then
- _lookup_table[t] = tcopy
+ if not tables[t] then
+ tables[t] = tcopy
end
- for i,v in pairs(t) do
+ for i,v in pairs(t) do -- brrr, what happens with sparse indexed
if type(i) == "table" then
- if _lookup_table[i] then
- i = _lookup_table[i]
+ if tables[i] then
+ i = tables[i]
else
- i = table.copy(i, _lookup_table)
+ i = table.copy(i, tables)
end
end
if type(v) ~= "table" then
tcopy[i] = v
+ elseif tables[v] then
+ tcopy[i] = tables[v]
else
- if _lookup_table[v] then
- tcopy[i] = _lookup_table[v]
- else
- tcopy[i] = table.copy(v, _lookup_table)
- end
+ tcopy[i] = table.copy(v, tables)
end
end
+ local mt = getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
return tcopy
end
@@ -514,6 +598,8 @@ end
do
+ -- one of my first exercises in lua ...
+
-- 34.055.092 32.403.326 arabtype.tma
-- 1.620.614 1.513.863 lmroman10-italic.tma
-- 1.325.585 1.233.044 lmroman10-regular.tma
@@ -873,6 +959,25 @@ function table.tohash(t)
return h
end
+function table.contains(t, v)
+ if t then
+ for i=1, #t do
+ if t[i] == v then
+ return true
+ end
+ end
+ end
+ return false
+end
+
+function table.count(t)
+ local n, e = 0, next(t)
+ while e do
+ n, e = n + 1, next(t,e)
+ end
+ return n
+end
+
--~ function table.are_equal(a,b)
--~ return table.serialize(a) == table.serialize(b)
--~ end
@@ -1053,6 +1158,38 @@ do
end
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(string.format(" [%s]",table.concat(options,"|")))
+ end
+ if default then
+ io.write(string.format(" [%s]",default))
+ end
+ io.write(string.format(" "))
+ local answer = io.read()
+ answer = answer:gsub("^%s*(.*)%s*$","%1")
+ if answer == "" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for _,v in pairs(options) do
+ if v == answer then
+ return answer
+ end
+ end
+ local pattern = "^" .. answer
+ for _,v in pairs(options) do
+ if v:find(pattern) then
+ return v
+ end
+ end
+ end
+ end
+end
+
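io.ask, added above, prompts on the console, returns the default on an empty answer, and also accepts an unambiguous prefix of one of the listed options. Hypothetical usage (the prompt text here is made up; the cache-path question later in this patch uses the same call pattern):

    local answer = io.ask("Create the directory?", "no", { "yes", "no" })
    if answer == "yes" then
        -- typing just "y" is enough: it is matched as a prefix of "yes"
    end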
-- filename : l-number.lua
-- comment : split off from luat-lib
@@ -1064,6 +1201,31 @@ if not versions then versions = { } end versions['l-number'] = 1.001
if not number then number = { } end
+-- a,b,c,d,e,f = number.toset(100101)
+
+function number.toset(n)
+ return (tostring(n)):match("(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
+end
+
+-- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5%
+-- on
+--
+-- for i=1,1000000 do
+-- local a,b,c,d,e,f,g,h = number.toset(12345678)
+-- local a,b,c,d = number.toset(1234)
+-- local a,b,c = number.toset(123)
+-- end
+--
+-- of course dedicated "(.)(.)(.)(.)" matches are even faster
+
+do
+ local one = lpeg.C(1-lpeg.S(''))^1
+
+ function number.toset(n)
+ return lpeg.match(one,tostring(n))
+ end
+end
+
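number.toset splits a number into its individual digits; the lpeg variant in the do block above replaces the string.match based definition. Usage, following the comment in the code:

    local a, b, c, d, e, f = number.toset(100101)
    -- a .. b .. c .. d .. e .. f --> "100101", one digit (as a string) per variable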
-- filename : l-os.lua
@@ -1110,7 +1272,7 @@ if md5 then do
if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
- if not md5.dec then function md5.dec(str) return convert(stt,"%03i") end end
+ if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
end end
@@ -1138,7 +1300,7 @@ function file.addsuffix(filename, suffix)
end
function file.replacesuffix(filename, suffix)
- return filename:gsub("%.%a+$", "." .. suffix)
+ return (filename:gsub("%.%a+$", "." .. suffix))
end
function file.dirname(name)
@@ -1150,7 +1312,7 @@ function file.basename(name)
end
function file.extname(name)
- return name:match("^.+%.(.-)$") or ""
+ return name:match("^.+%.([^/\\]-)$") or ""
end
function file.join(...)
@@ -1252,15 +1414,18 @@ dir = { }
if lfs then
function dir.glob_pattern(path,patt,recurse,action)
- for name in lfs.dir(path) do
- local full = path .. '/' .. name
- local mode = lfs.attributes(full,'mode')
- if mode == 'file' then
- if name:find(patt) then
- action(full)
+ local ok, scanner = xpcall(function() return lfs.dir(path) end, function() end) -- kepler safe
+ if ok and type(scanner) == "function" then
+ for name in scanner do
+ local full = path .. '/' .. name
+ local mode = lfs.attributes(full,'mode')
+ if mode == 'file' then
+ if name:find(patt) then
+ action(full)
+ end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ dir.glob_pattern(full,patt,recurse,action)
end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- dir.glob_pattern(full,patt,recurse,action)
end
end
end
@@ -1285,6 +1450,30 @@ if lfs then
return t
end
+ function dir.globfiles(path,recurse,func,files)
+ if type(func) == "string" then
+ local s = func -- alas, we need this indirect way
+ func = function(name) return name:find(s) end
+ end
+ files = files or { }
+ for name in lfs.dir(path) do
+ if name:find("^%.") then
+ --- skip
+ elseif lfs.attributes(name,'mode') == "directory" then
+ if recurse then
+ dir.globfiles(path .. "/" .. name,recurse,func,files)
+ end
+ elseif func then
+ if func(name) then
+ files[#files+1] = path .. "/" .. name
+ end
+ else
+ files[#files+1] = path .. "/" .. name
+ end
+ end
+ return files
+ end
+
-- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex")
-- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex")
-- t = dir.glob("c:/data/develop/context/texmf/**/*.tex")
@@ -1346,11 +1535,21 @@ function boolean.tonumber(b)
if b then return 1 else return 0 end
end
-function toboolean(str)
- if type(str) == "string" then
- return str == "true" or str == "yes" or str == "on" or str == "1"
- elseif type(str) == "number" then
- return tonumber(str) ~= 0
+function toboolean(str,tolerant)
+ if tolerant then
+ if type(str) == "string" then
+ return str == "true" or str == "yes" or str == "on" or str == "1"
+ elseif type(str) == "number" then
+ return tonumber(str) ~= 0
+ elseif type(str) == "nil" then
+ return false
+ else
+ return str
+ end
+ elseif str == "true" then
+ return true
+ elseif str == "false" then
+ return false
else
return str
end
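toboolean now converts only the literal strings "true" and "false" unless the new second argument asks for tolerant mode, in which case the old loose interpretation applies. Expected results with the merged code:

    -- toboolean("true")       --> true
    -- toboolean("yes")        --> "yes"  (passed through unchanged)
    -- toboolean("yes", true)  --> true   (tolerant mode)
    -- toboolean(0, true)      --> false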
@@ -1641,10 +1840,15 @@ function utils.merger.selfclean(name)
)
end
+utils.lua.compile_strip = true
+
function utils.lua.compile(luafile, lucfile)
-- utils.report("compiling",luafile,"into",lucfile)
os.remove(lucfile)
- local command = "-s -o " .. string.quote(lucfile) .. " " .. string.quote(luafile)
+ local command = "-o " .. string.quote(lucfile) .. " " .. string.quote(luafile)
+ if utils.lua.compile_strip then
+ command = "-s " .. command
+ end
if os.execute("texluac " .. command) == 0 then
return true
elseif os.execute("luac " .. command) == 0 then
@@ -1742,6 +1946,10 @@ function environment.showarguments()
end
end
+function environment.setargument(name,value)
+ environment.arguments[name] = value
+end
+
function environment.argument(name)
if environment.arguments[name] then
return environment.arguments[name]
@@ -1823,6 +2031,7 @@ end
-- Beware, loading and saving is overloaded in luat-tmp!
-- todo: instances.[hashes,cnffiles,configurations,522] -> ipairs (alles check, sneller)
+-- todo: check escaping in find etc, too much, too slow
if not versions then versions = { } end versions['luat-inp'] = 1.001
if not environment then environment = { } end
@@ -2060,7 +2269,7 @@ input.settrace(tonumber(os.getenv("MTX.INPUT.TRACE") or os.getenv("MTX_INPUT_TRA
-- These functions can be used to test the performance, especially
-- loading the database files.
-function input.start_timing(instance)
+function input.starttiming(instance)
if instance then
instance.starttime = os.clock()
if not instance.loadtime then
@@ -2069,7 +2278,7 @@ function input.start_timing(instance)
end
end
-function input.stop_timing(instance, report)
+function input.stoptiming(instance, report)
if instance and instance.starttime then
instance.stoptime = os.clock()
local loadtime = instance.stoptime - instance.starttime
@@ -2083,9 +2292,6 @@ function input.stop_timing(instance, report)
end
end
-input.stoptiming = input.stop_timing
-input.starttiming = input.start_timing
-
function input.elapsedtime(instance)
return string.format("%0.3f",instance.loadtime or 0)
end
@@ -2398,99 +2604,106 @@ function input.generatedatabase(instance,specification)
return input.methodhandler('generators', instance, specification)
end
-function input.generators.tex(instance,specification)
- local tag = specification
- if not instance.lsrmode and lfs and lfs.dir then
- input.report("scanning path",specification)
- instance.files[tag] = { }
- local files = instance.files[tag]
- local n, m, r = 0, 0, 0
- local spec = specification .. '/'
- local attributes = lfs.attributes
- local directory = lfs.dir
- local small = instance.smallcache
- local function action(path)
- local mode, full
- if path then
- full = spec .. path .. '/'
- else
- full = spec
- end
- for name in directory(full) do
- if name:find("^%.") then
- -- skip
- elseif name:find("[%~%`%!%#%$%%%^%&%*%(%)%=%{%}%[%]%:%;\"\'%|%|%<%>%,%?\n\r\t]") then
- -- texio.write_nl("skipping " .. name)
- -- skip
+do
+
+ local weird = lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+
+ function input.generators.tex(instance,specification)
+ local tag = specification
+ if not instance.lsrmode and lfs and lfs.dir then
+ input.report("scanning path",specification)
+ instance.files[tag] = { }
+ local files = instance.files[tag]
+ local n, m, r = 0, 0, 0
+ local spec = specification .. '/'
+ local attributes = lfs.attributes
+ local directory = lfs.dir
+ local small = instance.smallcache
+ local function action(path)
+ local mode, full
+ if path then
+ full = spec .. path .. '/'
else
- mode = attributes(full..name,'mode')
- if mode == "directory" then
- m = m + 1
- if path then
- action(path..'/'..name)
- else
- action(name)
- end
- elseif path and mode == 'file' then
- n = n + 1
- local f = files[name]
- if f then
- if not small then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
+ full = spec
+ end
+ for name in directory(full) do
+ if name:find("^%.") then
+ -- skip
+ -- elseif name:find("[%~%`%!%#%$%%%^%&%*%(%)%=%{%}%[%]%:%;\"\'%|%<%>%,%?\n\r\t]") then -- too much escaped
+ elseif weird:match(name) then
+ -- texio.write_nl("skipping " .. name)
+ -- skip
+ else
+ mode = attributes(full..name,'mode')
+ if mode == "directory" then
+ m = m + 1
+ if path then
+ action(path..'/'..name)
+ else
+ action(name)
end
- else
- files[name] = path
- local lower = name:lower()
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
+ elseif path and mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if not small then
+ if type(f) == 'string' then
+ files[name] = { f, path }
+ else
+ f[#f+1] = path
+ end
+ end
+ else
+ files[name] = path
+ local lower = name:lower()
+ if name ~= lower then
+ files["remap:"..lower] = name
+ r = r + 1
+ end
end
end
end
end
end
- end
- action()
- input.report(string.format("%s files found on %s directories with %s uppercase remappings",n,m,r))
- else
- local fullname = file.join(specification,input.lsrname)
- local path = '.'
- local f = io.open(fullname)
- if f then
- instance.files[tag] = { }
- local files = instance.files[tag]
- local small = instance.smallcache
- input.report("loading lsr file",fullname)
- -- for line in f:lines() do -- much slower then the next one
- for line in (f:read("*a")):gmatch("(.-)\n") do
- if line:find("^[%a%d]") then
- local fl = files[line]
- if fl then
- if not small then
- if type(fl) == 'string' then
- files[line] = { fl, path } -- table
- else
- fl[#fl+1] = path
+ action()
+ input.report(string.format("%s files found on %s directories with %s uppercase remappings",n,m,r))
+ else
+ local fullname = file.join(specification,input.lsrname)
+ local path = '.'
+ local f = io.open(fullname)
+ if f then
+ instance.files[tag] = { }
+ local files = instance.files[tag]
+ local small = instance.smallcache
+ input.report("loading lsr file",fullname)
+ -- for line in f:lines() do -- much slower then the next one
+ for line in (f:read("*a")):gmatch("(.-)\n") do
+ if line:find("^[%a%d]") then
+ local fl = files[line]
+ if fl then
+ if not small then
+ if type(fl) == 'string' then
+ files[line] = { fl, path } -- table
+ else
+ fl[#fl+1] = path
+ end
+ end
+ else
+ files[line] = path -- string
+ local lower = line:lower()
+ if line ~= lower then
+ files["remap:"..lower] = line
end
end
else
- files[line] = path -- string
- local lower = line:lower()
- if line ~= lower then
- files["remap:"..lower] = line
- end
+ path = line:match("%.%/(.-)%:$") or path -- match could be nil due to empty line
end
- else
- path = line:match("%.%/(.-)%:$") or path -- match could be nil due to empty line
end
+ f:close()
end
- f:close()
end
end
+
end
-- savers, todo
@@ -2790,7 +3003,7 @@ end
function input.list_configurations(instance)
for _,key in pairs(table.sortedkeys(instance.kpsevars)) do
- if not instance.pattern or instance.pattern == "" or key:find(instance.pattern) then
+ if not instance.pattern or (instance.pattern=="") or key:find(instance.pattern) then
print(key.."\n")
for i,c in ipairs(instance.order) do
local str = c[key]
@@ -2913,10 +3126,168 @@ end
-- a,b,c/{p,q,r}/d/{x,y,z}//
-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+
+-- this one is better and faster, but it took me a while to realize
+-- that this kind of replacement is cleaner than messy parsing and
+-- fuzzy concatenating we can probably gain a bit with selectively
+-- applying lpeg, but experiments with lpeg parsing this proved not to
+-- work that well; the parsing is ok, but dealing with the resulting
+-- table is a pain because we need to work inside-out recursively
+
+--~ function input.aux.splitpathexpr(str, t, validate)
+--~ -- no need for optimization, only called a few times, we can use lpeg for the sub
+--~ t = t or { }
+--~ while true do
+--~ local done = false
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("([^{},]+){([^{}]-)}", function(a,b)
+--~ local t = { }
+--~ for s in b:gmatch("([^,]+)") do
+--~ t[#t+1] = a .. s
+--~ end
+--~ ok, done = true, true
+--~ return "{" .. table.concat(t,",") .. "}"
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("{([^{}]-)}([^{},]+)", function(a,b)
+--~ local t = { }
+--~ for s in a:gmatch("([^,]+)") do
+--~ t[#t+1] = s .. b
+--~ end
+--~ ok, done = true, true
+--~ return "{" .. table.concat(t,",") .. "}"
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("([,{]){([^{}]+)}([,}])", function(a,b,c)
+--~ ok, done = true, true
+--~ return a .. b .. c
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ if not done then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("{([^{}]-)}{([^{}]-)}", function(a,b)
+--~ local t = { }
+--~ for sa in a:gmatch("([^,]+)") do
+--~ for sb in b:gmatch("([^,]+)") do
+--~ t[#t+1] = sa .. sb
+--~ end
+--~ end
+--~ ok = true
+--~ return "{" .. table.concat(t,",") .. "}"
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("{([^{}]-)}", function(a)
+--~ ok = true
+--~ return a
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ if validate then
+--~ for s in str:gmatch("([^,]+)") do
+--~ s = validate(s)
+--~ if s then t[#t+1] = s end
+--~ end
+--~ else
+--~ for s in str:gmatch("([^,]+)") do
+--~ t[#t+1] = s
+--~ end
+--~ end
+--~ return t
+--~ end
+
+function input.aux.splitpathexpr(str, t, validate)
+ -- no need for optimization, only called a few times, we can use lpeg for the sub
+ t = t or { }
+ local concat = table.concat
+ while true do
+ local done = false
+ while true do
+ ok = false
+ str = str:gsub("([^{},]+){([^{}]-)}", function(a,b)
+ local t = { }
+ b:piecewise(",", function(s) t[#t+1] = a .. s end)
+ ok, done = true, true
+ return "{" .. concat(t,",") .. "}"
+ end)
+ if not ok then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("{([^{}]-)}([^{},]+)", function(a,b)
+ local t = { }
+ a:piecewise(",", function(s) t[#t+1] = s .. b end)
+ ok, done = true, true
+ return "{" .. concat(t,",") .. "}"
+ end)
+ if not ok then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("([,{]){([^{}]+)}([,}])", function(a,b,c)
+ ok, done = true, true
+ return a .. b .. c
+ end)
+ if not ok then break end
+ end
+ if not done then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("{([^{}]-)}{([^{}]-)}", function(a,b)
+ local t = { }
+ a:piecewise(",", function(sa)
+ b:piecewise(",", function(sb)
+ t[#t+1] = sa .. sb
+ end)
+ end)
+ ok = true
+ return "{" .. concat(t,",") .. "}"
+ end)
+ if not ok then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("{([^{}]-)}", function(a)
+ ok = true
+ return a
+ end)
+ if not ok then break end
+ end
+ if validate then
+ str:piecewise(",", function(s)
+ s = validate(s)
+ if s then t[#t+1] = s end
+ end)
+ else
+ str:piecewise(",", function(s)
+ t[#t+1] = s
+ end)
+ end
+ return t
+end
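input.aux.splitpathexpr expands kpse-style brace expressions by repeated textual rewriting, replacing the marker-based expansion that input.aux.expanded_path used before (kept below as a comment). A usage sketch along the lines of the examples in the comments above:

    local t = input.aux.splitpathexpr("a/{b,c}/{x,y}", { })
    -- t --> { "a/b/x", "a/b/y", "a/c/x", "a/c/y" }
    -- the optional third argument filters or rewrites each expanded entry,
    -- which is how expanded_path applies file.collapse_path below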
function input.aux.expanded_path(instance,pathlist)
-- a previous version fed back into pathlist
- local i, n, oldlist, newlist, ok = 0, 0, { }, { }, false
+ local newlist, ok = { }, false
for _,v in ipairs(pathlist) do
if v:find("[{}]") then
ok = true
@@ -2924,45 +3295,11 @@ function input.aux.expanded_path(instance,pathlist)
end
end
if ok then
- for _,v in ipairs(pathlist) do
- oldlist[#oldlist+1] = (v:gsub("([\{\}])", function(p)
- if p == "{" then
- i = i + 1
- if i > n then n = i end
- return "<" .. (i-1) .. ">"
- else
- i = i - 1
- return "</" .. i .. ">"
- end
- end))
- end
- for i=1,n do
- while true do
- local more = false
- local pattern = "^(.-)<"..(n-i)..">(.-)</"..(n-i)..">(.-)$"
- local t = { }
- for _,v in ipairs(oldlist) do
- local pre, mid, post = v:match(pattern)
- if pre and mid and post then
- more = true
- for vv in string.gmatch(mid..',',"(.-),") do
- if vv == '.' then
- t[#t+1] = pre..post
- else
- t[#t+1] = pre..vv..post
- end
- end
- else
- t[#t+1] = v
- end
- end
- oldlist = t
- if not more then break end
- end
- end
- for _,v in ipairs(oldlist) do
- v = file.collapse_path(v)
- if v ~= "" and not v:find(instance.dummy_path_expr) then newlist[#newlist+1] = v end
+ for _, v in ipairs(pathlist) do
+ input.aux.splitpathexpr(v, newlist, function(s)
+ s = file.collapse_path(s)
+ return s ~= "" and not s:find(instance.dummy_path_expr) and s
+ end)
end
else
for _,v in ipairs(pathlist) do
@@ -2975,6 +3312,83 @@ function input.aux.expanded_path(instance,pathlist)
return newlist
end
+--~ old one, imperfect and not that efficient
+--~
+--~ function input.aux.expanded_path(instance,pathlist)
+--~ -- a previous version fed back into pathlist
+--~ local i, n, oldlist, newlist, ok = 0, 0, { }, { }, false
+--~ for _,v in ipairs(pathlist) do
+--~ if v:find("[{}]") then
+--~ ok = true
+--~ break
+--~ end
+--~ end
+--~ if ok then
+--~ for _,v in ipairs(pathlist) do
+--~ oldlist[#oldlist+1] = (v:gsub("([\{\}])", function(p)
+--~ if p == "{" then
+--~ i = i + 1
+--~ if i > n then n = i end
+--~ return "<" .. (i-1) .. ">"
+--~ else
+--~ i = i - 1
+--~ return "</" .. i .. ">"
+--~ end
+--~ end))
+--~ end
+--~ for i=1,n do
+--~ while true do
+--~ local more = false
+--~ local pattern = "^(.-)<"..(n-i)..">(.-)</"..(n-i)..">(.-)$"
+--~ local t = { }
+--~ for _,v in ipairs(oldlist) do
+--~ local pre, mid, post = v:match(pattern)
+--~ if pre and mid and post then
+--~ more = true
+--~ for vv in string.gmatch(mid..',',"(.-),") do -- (mid, "([^,]+)")
+--~ if vv == '.' then
+--~ t[#t+1] = pre..post
+--~ else
+--~ t[#t+1] = pre..vv..post
+--~ end
+--~ end
+--~ else
+--~ t[#t+1] = v
+--~ end
+--~ end
+--~ oldlist = t
+--~ if not more then break end
+--~ end
+--~ end
+--~ if true then
+--~ -- many dups are possible due to messy resolve / order can be messed up too, brr !
+--~ local ok = { }
+--~ for _,o in ipairs(oldlist) do
+--~ for v in o:gmatch("([^,]+)") do
+--~ if not ok[v] then
+--~ ok[v] = true
+--~ v = file.collapse_path(v)
+--~ if v ~= "" and not v:find(instance.dummy_path_expr) then newlist[#newlist+1] = v end
+--~ end
+--~ end
+--~ end
+--~ else
+--~ for _,v in ipairs(oldlist) do
+--~ v = file.collapse_path(v)
+--~ if v ~= "" and not v:find(instance.dummy_path_expr) then newlist[#newlist+1] = v end
+--~ end
+--~ end
+--~ else
+--~ for _,v in ipairs(pathlist) do
+--~ for vv in string.gmatch(v..',',"(.-),") do
+--~ vv = file.collapse_path(v)
+--~ if vv ~= "" then newlist[#newlist+1] = vv end
+--~ end
+--~ end
+--~ end
+--~ return newlist
+--~ end
+
--~ function input.is_readable(name) -- brrr, get rid of this
--~ return name:find("^zip##") or file.is_readable(name)
--~ end
@@ -3073,24 +3487,51 @@ function input.suffixes_of_format(str)
end
end
-function input.aux.qualified_path(filename) -- make platform dependent / not good yet
- return
- filename:find("^%.+/") or
- filename:find("^/") or
- filename:find("^%a+%:") or
- filename:find("^%a+##")
-end
+--~ function input.aux.qualified_path(filename) -- make platform dependent / not good yet
+--~ return
+--~ filename:find("^%.+/") or
+--~ filename:find("^/") or
+--~ filename:find("^%a+%:") or
+--~ filename:find("^%a+##")
+--~ end
+
+--~ function input.normalize_name(original)
+--~ -- internally we use type##spec##subspec ; this hackery slightly slows down searching
+--~ local str = original or ""
+--~ str = str:gsub("::", "##") -- :: -> ##
+--~ str = str:gsub("^(%a+)://" ,"%1##") -- zip:// -> zip##
+--~ str = str:gsub("(.+)##(.+)##/(.+)","%1##%2##%3") -- ##/spec -> ##spec
+--~ if (input.trace>1) and (original ~= str) then
+--~ input.logger('= normalizer',original.." -> "..str)
+--~ end
+--~ return str
+--~ end
-function input.normalize_name(original)
- -- internally we use type##spec##subspec ; this hackery slightly slows down searching
- local str = original or ""
- str = str:gsub("::", "##") -- :: -> ##
- str = str:gsub("^(%a+)://" ,"%1##") -- zip:// -> zip##
- str = str:gsub("(.+)##(.+)##/(.+)","%1##%2##%3") -- ##/spec -> ##spec
- if (input.trace>1) and (original ~= str) then
- input.logger('= normalizer',original.." -> "..str)
+do -- called about 700 times for an empty doc (font initializations etc)
+ -- i need to weed the font files for redundant calls
+
+ local letter = lpeg.R("az","AZ")
+ local separator = lpeg.P("##")
+
+ local qualified = lpeg.P(".")^0 * lpeg.P("/") + letter*lpeg.P(":") + letter^1*separator
+ local normalized = lpeg.Cs(
+ (letter^1*(lpeg.P("://")/"##") * (1-lpeg.P(false))^1) +
+ (lpeg.P("::")/"##" + (1-separator)^1*separator*(1-separator)^1*separator*(lpeg.P("/")/"") + 1)^0
+ )
+
+ -- ./name ../name /name c: zip## (todo: use url internally and get rid of ##)
+ function input.aux.qualified_path(filename)
+ return qualified:match(filename)
+ end
+
+ -- zip:// -> zip## ; :: -> ## ; aa##bb##/cc -> aa##bb##cc
+ function input.normalize_name(original)
+ local str = normalized:match(original or "")
+ if input.trace > 1 and original ~= str then
+ input.logger('= normalizer',original.." -> "..str)
+ end
+ return str
end
- return str
end
-- split the next one up, better for jit
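The lpeg rewrite of qualified_path and normalize_name keeps the old rules (zip:// becomes zip##, :: becomes ##, aa##bb##/cc becomes aa##bb##cc). Expected behaviour on hypothetical names:

    -- input.aux.qualified_path("./setup.tex")  --> truthy (explicitly qualified)
    -- input.normalize_name("zip://archive.zip/fonts/test.tfm")
    --                      --> "zip##archive.zip/fonts/test.tfm"
    -- input.normalize_name("tex::somefile.tex") --> "tex##somefile.tex"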
@@ -3455,13 +3896,13 @@ function input.automount(instance)
end
function input.load(instance)
- input.start_timing(instance)
+ input.starttiming(instance)
input.identify_cnf(instance)
input.load_cnf(instance)
input.expand_variables(instance)
input.load_hash(instance)
input.automount(instance)
- input.stop_timing(instance)
+ input.stoptiming(instance)
end
function input.for_files(instance, command, files, filetype, mustexist)
@@ -3755,7 +4196,7 @@ being written at the same time is small. We also need to extend
luatools with a recache feature.</p>
--ldx]]--
-caches = caches or { }
+caches = caches or { }
dir = dir or { }
texmf = texmf or { }
@@ -3768,8 +4209,18 @@ caches.tree = false
caches.temp = caches.temp or os.getenv("TEXMFCACHE") or os.getenv("HOME") or os.getenv("HOMEPATH") or os.getenv("VARTEXMF") or os.getenv("TEXMFVAR") or os.getenv("TMP") or os.getenv("TEMP") or os.getenv("TMPDIR") or nil
caches.paths = caches.paths or { caches.temp }
+input.usecache = not toboolean(os.getenv("TEXMFSHARECACHE") or "false",true) -- true
+
+if caches.temp and caches.temp ~= "" and lfs.attributes(caches.temp,"mode") ~= "directory" then
+ if io.ask(string.format("Should I create the cache path %s?",caches.temp), "no", { "yes", "no" }) == "yes" then
+ lfs.mkdir(caches.temp)
+ end
+end
if not caches.temp or caches.temp == "" then
- print("\nFATAL ERROR: NO VALID TEMPORARY PATH\n")
+ print("\nfatal error: there is no valid cache path defined\n")
+ os.exit()
+elseif lfs.attributes(caches.temp,"mode") ~= "directory" then
+ print(string.format("\nfatal error: cache path %s is not a directory\n",caches.temp))
os.exit()
end
@@ -3909,8 +4360,8 @@ do -- local report
function containers.is_valid(container, name)
if name and name ~= "" then
- local cs = container.storage[name]
- return cs and not table.is_empty(cs) and cs.cache_version == container.version
+ local storage = container.storage[name]
+ return storage and not table.is_empty(storage) and storage.cache_version == container.version
else
return false
end
@@ -3956,8 +4407,6 @@ end
-- since we want to use the cache instead of the tree, we will now
-- reimplement the saver.
-input.usecache = true
-
function input.aux.save_data(instance, dataname, check)
for cachename, files in pairs(instance[dataname]) do
local name
@@ -4357,7 +4806,7 @@ else
function input.registerzipfile(instance,zipname,tag)
if not zip.registeredfiles[zipname] then
- input.start_timing(instance)
+ input.starttiming(instance)
local z = zip.open(zipname)
if not z then
zipname = input.find_file(instance,zipname)
@@ -4370,7 +4819,7 @@ else
else
input.logger("? zipfile","unknown "..zipname)
end
- input.stop_timing(instance)
+ input.stoptiming(instance)
end
end
@@ -4476,7 +4925,7 @@ if texconfig and not texlua then
t = {
utftype = u, -- may go away
lines = l,
- current = 0,
+ current = 0, -- line number, not really needed
handle = nil,
noflines = #l,
close = function()
@@ -4484,15 +4933,23 @@ if texconfig and not texlua then
input.show_close(filename)
end,
reader = function(self)
- if not self then self = t end
- if self.current >= #self.lines then
+ self = self or t
+ local current, lines = self.current, self.lines
+ if current >= #lines then
return nil
else
- self.current = self.current + 1
- if input.filters.utf_translator then
- return input.filters.utf_translator(self.lines[t.current])
+ self.current = current + 1
+ local line = lines[self.current]
+ if line == "" then
+ return ""
else
- return self.lines[self.current]
+ local translator = input.filters.utf_translator
+ -- return (translator and translator(line)) or line
+ if translator then
+ return translator(line)
+ else
+ return line
+ end
end
end
end
@@ -4502,13 +4959,15 @@ if texconfig and not texlua then
-- todo: file;name -> freeze / eerste regel scannen -> freeze
t = {
reader = function(self)
- if not self then self = t end -- not used
- if input.filters.dynamic_translator then
- return input.filters.dynamic_translator(file_handle:read())
+ local line = file_handle:read()
+ if line == "" then
+ return ""
elseif input.filters.utf_translator then
- return input.filters.utf_translator(file_handle:read())
+ return input.filters.utf_translator(line)
+ elseif input.filters.dynamic_translator then
+ return input.filters.dynamic_translator(line)
else
- return file_handle:read()
+ return line
end
end,
close = function()
@@ -4745,7 +5204,7 @@ if texconfig and not texlua then
luatex.variablenames = {
'main_memory', 'extra_mem_bot', 'extra_mem_top',
- 'buf_size',
+ 'buf_size','expand_depth',
'font_max', 'font_mem_size',
'hash_extra', 'max_strings', 'pool_free', 'pool_size', 'string_vacancies',
'obj_tab_size', 'pdf_mem_size', 'dest_names_size',
@@ -4920,6 +5379,7 @@ own = { }
own.libs = { -- todo: check which ones are really needed
'l-string.lua',
+ 'l-lpeg.lua',
'l-table.lua',
'l-io.lua',
'l-number.lua',
@@ -4991,7 +5451,7 @@ input.banner = 'LuaTools | '
utils.report = input.report
input.defaultlibs = { -- not all are needed
- 'l-string.lua', 'l-table.lua', 'l-boolean.lua', 'l-number.lua', 'l-unicode.lua',
+ 'l-string.lua', 'l-lpeg.lua', 'l-table.lua', 'l-boolean.lua', 'l-number.lua', 'l-unicode.lua',
'l-md5.lua', 'l-os.lua', 'l-io.lua', 'l-file.lua', 'l-dir.lua', 'l-utils.lua', 'l-tex.lua',
'luat-lib.lua', 'luat-inp.lua', 'luat-tmp.lua', 'luat-zip.lua', 'luat-tex.lua'
}
@@ -5153,7 +5613,8 @@ function input.my_make_format(instance,texname)
flags[#flags+1] = "--lua=" .. string.quote(luaname)
-- flags[#flags+1] = "--progname=" .. instance.progname -- potential fallback
end
- local command = "luatex ".. table.concat(flags," ") .. " " .. string.quote(fullname) .. " \\dump"
+ local bs = (environment.platform == "unix" and "\\\\") or "\\" -- todo: make a function
+ local command = "luatex ".. table.concat(flags," ") .. " " .. string.quote(fullname) .. " " .. bs .. "dump"
input.report("running command: " .. command .. "\n")
os.exec(command)
end
@@ -5163,7 +5624,7 @@ function input.my_make_format(instance,texname)
end
end
-function input.my_run_format(instance,name,data)
+function input.my_run_format(instance,name,data,more)
-- hm, rather old code here; we can now use the file.whatever functions
if name and (name ~= "") then
local barename = name:gsub("%.%a+$","")
@@ -5189,7 +5650,7 @@ function input.my_run_format(instance,name,data)
if f then
f:close()
-- bug, no .fmt !
- local command = "luatex --fmt=" .. string.quote(barename) .. " --lua=" .. string.quote(luaname) .. " " .. string.quote(data)
+ local command = "luatex --fmt=" .. string.quote(barename) .. " --lua=" .. string.quote(luaname) .. " " .. string.quote(data) .. " " .. string.quote(more)
input.report("running command: " .. command)
os.exec(command)
else
@@ -5227,11 +5688,11 @@ elseif environment.arguments["find-path"] then
elseif environment.arguments["run"] then
input.my_prepare_a(instance) -- ! no need for loading databases
input.verbose = true
- input.my_run_format(instance,environment.files[1] or "",environment.files[2] or "")
+ input.my_run_format(instance,environment.files[1] or "",environment.files[2] or "",environment.files[3] or "")
elseif environment.arguments["fmt"] then
input.my_prepare_a(instance) -- ! no need for loading databases
input.verbose = true
- input.my_run_format(instance,environment.arguments["fmt"], environment.files[1] or "")
+ input.my_run_format(instance,environment.arguments["fmt"], environment.files[1] or "",environment.files[2] or "")
elseif environment.arguments["expand-braces"] then
input.my_prepare_a(instance)
input.for_files(instance, input.expand_braces, environment.files)
diff --git a/scripts/context/lua/mtx-babel.lua b/scripts/context/lua/mtx-babel.lua
index 5ef9ae934..c9855b86a 100644
--- a/scripts/context/lua/mtx-babel.lua
+++ b/scripts/context/lua/mtx-babel.lua
@@ -1,6 +1,12 @@
--- data tables by Thomas A. Schmitz
+if not modules then modules = { } end modules ['mtx-babel'] = {
+ version = 1.001,
+ comment = "companion to mtxrun.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
-dofile(input.find_file(instance,"luat-log.lua"))
+-- data tables by Thomas A. Schmitz
texmf.instance = instance -- we need to get rid of this / maybe current instance in global table
@@ -9,6 +15,10 @@ scripts.babel = scripts.babel or { }
do
+ local converters = { }
+
+ -- greek
+
local replace_01 = { -- <' * |
a = "á¾…",
h = "ᾕ",
@@ -216,6 +226,7 @@ do
O = "Ὁ",
U = "á½™",
W = "Ὡ",
+ R = "Ῥ",
}
local replace_23 = { -- > *
@@ -301,61 +312,111 @@ do
local skips_01 = lpeg.P("\\") * lpeg.R("az", "AZ")^1
local skips_02 = lpeg.P("[") * (1- lpeg.S("[]"))^1 * lpeg.P("]")
- local stage_01 = (lpeg.P("<'") * lpeg.Cs(1) * lpeg.P('|')) / replace_01
- local stage_02 = (lpeg.P(">'") * lpeg.Cs(1) * lpeg.P('|')) / replace_02
- local stage_03 = (lpeg.P("<`") * lpeg.Cs(1) * lpeg.P('|')) / replace_03
- local stage_04 = (lpeg.P(">`") * lpeg.Cs(1) * lpeg.P('|')) / replace_04
- local stage_05 = (lpeg.P("<~") * lpeg.Cs(1) * lpeg.P('|')) / replace_05
- local stage_06 = (lpeg.P(">~") * lpeg.Cs(1) * lpeg.P('|')) / replace_06
- local stage_07 = (lpeg.P('"\'') * lpeg.Cs(1) ) / replace_07
- local stage_08 = (lpeg.P('"`') * lpeg.Cs(1) ) / replace_08
- local stage_09 = (lpeg.P('"~') * lpeg.Cs(1) ) / replace_09
- local stage_10 = (lpeg.P("<'") * lpeg.Cs(1) ) / replace_10
- local stage_11 = (lpeg.P(">'") * lpeg.Cs(1) ) / replace_11
- local stage_12 = (lpeg.P("<`") * lpeg.Cs(1) ) / replace_12
- local stage_13 = (lpeg.P(">`") * lpeg.Cs(1) ) / replace_13
- local stage_14 = (lpeg.P(">~") * lpeg.Cs(1) ) / replace_14
- local stage_15 = (lpeg.P(">~") * lpeg.Cs(1) ) / replace_15
- local stage_16 = (lpeg.P("'") * lpeg.Cs(1) * lpeg.P('|')) / replace_16
- local stage_17 = (lpeg.P("`") * lpeg.Cs(1) * lpeg.P('|')) / replace_17
- local stage_18 = (lpeg.P("~") * lpeg.Cs(1) * lpeg.P('|')) / replace_18
- local stage_19 = (lpeg.P("'") * lpeg.Cs(1) ) / replace_19
- local stage_20 = (lpeg.P("`") * lpeg.Cs(1) ) / replace_20
- local stage_21 = (lpeg.P("~") * lpeg.Cs(1) ) / replace_21
- local stage_22 = (lpeg.P("<") * lpeg.Cs(1) ) / replace_22
- local stage_23 = (lpeg.P(">") * lpeg.Cs(1) ) / replace_23
- local stage_24 = (lpeg.Cs(1) * lpeg.P('|') ) / replace_24
- local stage_25 = (lpeg.P('"') * lpeg.Cs(1) ) / replace_25
- local stage_26 = (lpeg.Cs(1) ) / replace_26
-
- local stages =
- skips_01 + skips_02 +
- stage_01 + stage_02 + stage_03 + stage_04 + stage_05 +
- stage_06 + stage_07 + stage_08 + stage_09 + stage_10 +
- stage_11 + stage_12 + stage_13 + stage_14 + stage_15 +
- stage_16 + stage_17 + stage_18 + stage_19 + stage_20 +
- stage_21 + stage_22 + stage_23 + stage_24 + stage_25 +
- stage_26
-
- local parser = lpeg.Cs((stages + 1)^0)
+ local greek_01 = (lpeg.P("<'") * lpeg.Cs(1) * lpeg.P('|')) / replace_01
+ local greek_02 = (lpeg.P(">'") * lpeg.Cs(1) * lpeg.P('|')) / replace_02
+ local greek_03 = (lpeg.P("<`") * lpeg.Cs(1) * lpeg.P('|')) / replace_03
+ local greek_04 = (lpeg.P(">`") * lpeg.Cs(1) * lpeg.P('|')) / replace_04
+ local greek_05 = (lpeg.P("<~") * lpeg.Cs(1) * lpeg.P('|')) / replace_05
+ local greek_06 = (lpeg.P(">~") * lpeg.Cs(1) * lpeg.P('|')) / replace_06
+ local greek_07 = (lpeg.P('"\'') * lpeg.Cs(1) ) / replace_07
+ local greek_08 = (lpeg.P('"`') * lpeg.Cs(1) ) / replace_08
+ local greek_09 = (lpeg.P('"~') * lpeg.Cs(1) ) / replace_09
+ local greek_10 = (lpeg.P("<'") * lpeg.Cs(1) ) / replace_10
+ local greek_11 = (lpeg.P(">'") * lpeg.Cs(1) ) / replace_11
+ local greek_12 = (lpeg.P("<`") * lpeg.Cs(1) ) / replace_12
+ local greek_13 = (lpeg.P(">`") * lpeg.Cs(1) ) / replace_13
+ local greek_14 = (lpeg.P("<~") * lpeg.Cs(1) ) / replace_14
+ local greek_15 = (lpeg.P(">~") * lpeg.Cs(1) ) / replace_15
+ local greek_16 = (lpeg.P("'") * lpeg.Cs(1) * lpeg.P('|')) / replace_16
+ local greek_17 = (lpeg.P("`") * lpeg.Cs(1) * lpeg.P('|')) / replace_17
+ local greek_18 = (lpeg.P("~") * lpeg.Cs(1) * lpeg.P('|')) / replace_18
+ local greek_19 = (lpeg.P("'") * lpeg.Cs(1) ) / replace_19
+ local greek_20 = (lpeg.P("`") * lpeg.Cs(1) ) / replace_20
+ local greek_21 = (lpeg.P("~") * lpeg.Cs(1) ) / replace_21
+ local greek_22 = (lpeg.P("<") * lpeg.Cs(1) ) / replace_22
+ local greek_23 = (lpeg.P(">") * lpeg.Cs(1) ) / replace_23
+ local greek_24 = (lpeg.Cs(1) * lpeg.P('|') ) / replace_24
+ local greek_25 = (lpeg.P('"') * lpeg.Cs(1) ) / replace_25
+ local greek_26 = (lpeg.Cs(1) ) / replace_26
+
+ local skips =
+ skips_01 + skips_02
+
+ local greek =
+ greek_01 + greek_02 + greek_03 + greek_04 + greek_05 +
+ greek_06 + greek_07 + greek_08 + greek_09 + greek_10 +
+ greek_11 + greek_12 + greek_13 + greek_14 + greek_15 +
+ greek_16 + greek_17 + greek_18 + greek_19 + greek_20 +
+ greek_21 + greek_22 + greek_23 + greek_24 + greek_25 +
+ greek_26
+
+ local spacing = lpeg.S(" \n\r\t")
+ local startgreek = lpeg.P("\\startgreek")
+ local stopgreek = lpeg.P("\\stopgreek")
+ local localgreek = lpeg.P("\\localgreek")
+ local lbrace = lpeg.P("{")
+ local rbrace = lpeg.P("}")
+
+ local documentparser = lpeg.Cs((skips + greek + 1)^0)
+
+ local contextgrammar = lpeg.Cs ( lpeg.P { "scan",
+ ["scan"] = (lpeg.V("global") + lpeg.V("local") + skips + 1)^0,
+ ["global"] = startgreek * ((skips + greek + 1)-stopgreek )^0 ,
+ ["local"] = localgreek * lpeg.V("grouped"),
+ ["grouped"] = spacing^0 * lbrace * (lpeg.V("grouped") + skips + (greek - rbrace))^0 * rbrace,
+ } )
+
+ converters['greek'] = {
+ document = documentparser,
+ context = contextgrammar,
+ }
-- lpeg.print(parser): 254 lines
function scripts.babel.convert(filename)
if filename and filename ~= empty then
- local data = io.loaddata(filename)
- if data then
- data = parser:match(data)
- io.savedata(filename .. ".utf", data)
+ local data = io.loaddata(filename) or ""
+ if data ~= "" then
+ local language = environment.argument("language") or ""
+ if language ~= "" then
+ local converter = converters[language]
+ if converter then
+ local structure = environment.argument("structure") or "document"
+ converter = converter[structure]
+ if converter then
+ input.report(string.format("converting '%s' using language '%s' with structure '%s'", filename, language, structure))
+ data = converter:match(data)
+ local newfilename = filename .. ".utf"
+ io.savedata(newfilename, data)
+ input.report(string.format("converted data saved in '%s'", newfilename))
+ else
+ input.report(string.format("unknown structure '%s' language '%s'", structure, language))
+ end
+ else
+ input.report(string.format("no converter for language '%s'", language))
+ end
+ else
+ input.report(string.format("provide language"))
+ end
+ else
+ input.report(string.format("no data in '%s'",filename))
end
end
end
+ --~ print(contextgrammar:match [[
+ --~ oeps abg \localgreek{a}
+ --~ \startgreek abg \stopgreek \oeps
+ --~ oeps abg \localgreek{a{b}\oeps g}
+ --~ ]])
+
end
-banner = banner .. " | conversion tools "
+banner = banner .. " | babel conversion tools "
messages.help = [[
+--language=string conversion language (e.g. greek)
+--structure=string obey given structure ('document' or 'context', default: 'document')
--convert convert babel codes into utf
]]
@@ -366,3 +427,4 @@ if environment.argument("convert") then
else
input.help(banner,messages.help)
end
+
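
All the converters above share one trick: a one-character capture is used as a key into a
replacement table (patt / table), the whole thing sits inside a substitution capture
(lpeg.Cs), and control sequences are matched first so their letters are never rewritten.
A minimal, self-contained sketch of that technique; the mapping and the input are made up
and far smaller than the real babel tables:

    local lpeg = require("lpeg")
    local map      = { a = "á", e = "é", o = "ó" }        -- hypothetical mapping
    local accented = (lpeg.P("'") * lpeg.C(1)) / map      -- 'a -> á via table lookup
    local skipcs   = lpeg.P("\\") * lpeg.R("az","AZ")^1   -- leave \controlsequences alone
    local parser   = lpeg.Cs((skipcs + accented + 1)^0)
    print(parser:match("\\emph{'a 'e 'o}"))               -- prints: \emph{á é ó}
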
diff --git a/scripts/context/lua/mtx-cache.lua b/scripts/context/lua/mtx-cache.lua
index 8bd3b7a79..0fdaca6a4 100644
--- a/scripts/context/lua/mtx-cache.lua
+++ b/scripts/context/lua/mtx-cache.lua
@@ -1,4 +1,10 @@
-dofile(input.find_file(instance,"luat-log.lua"))
+if not modules then modules = { } end modules ['mtx-cache'] = {
+ version = 1.001,
+ comment = "companion to mtxrun.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
texmf.instance = instance -- we need to get rid of this / maybe current instance in global table
diff --git a/scripts/context/lua/mtx-chars.lua b/scripts/context/lua/mtx-chars.lua
index 470846419..77c74cf51 100644
--- a/scripts/context/lua/mtx-chars.lua
+++ b/scripts/context/lua/mtx-chars.lua
@@ -1,4 +1,10 @@
-dofile(input.find_file(instance,"luat-log.lua"))
+if not modules then modules = { } end modules ['mtx-chars'] = {
+ version = 1.001,
+ comment = "companion to mtxrun.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
texmf.instance = instance -- we need to get rid of this / maybe current instance in global table
diff --git a/scripts/context/lua/mtx-context.lua b/scripts/context/lua/mtx-context.lua
index c444dfd1a..2e7855847 100644
--- a/scripts/context/lua/mtx-context.lua
+++ b/scripts/context/lua/mtx-context.lua
@@ -1,4 +1,10 @@
-dofile(input.find_file(instance,"luat-log.lua"))
+if not modules then modules = { } end modules ['mtx-context'] = {
+ version = 1.001,
+ comment = "companion to mtxrun.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
texmf.instance = instance -- we need to get rid of this / maybe current instance in global table
@@ -468,9 +474,11 @@ function scripts.context.multipass.makeoptionfile(jobname,ctxdata)
setvalues("usemodules" , "\\usemodule[%s]")
setvalues("environments" , "\\environment %s ")
-- ctx stuff
- setvalues(ctxdata.modes, "\\enablemode[%s]")
- setvalues(ctxdata.modules, "\\usemodule[%s]")
- setvalues(ctxdata.environments, "\\environment %s ")
+ if ctxdata then
+ setvalues(ctxdata.modes, "\\enablemode[%s]")
+ setvalues(ctxdata.modules, "\\usemodule[%s]")
+ setvalues(ctxdata.environments, "\\environment %s ")
+ end
-- done
setalways( "\\protect")
setalways( "\\endinput")
@@ -497,11 +505,12 @@ function scripts.context.multipass.copytuifile(jobname)
end
function scripts.context.run(ctxdata)
- -- todo: interface
-for k,v in pairs(ctxdata.flags) do
- environment.setargument(k,v)
-end
-
+ if ctxdata then
+ -- todo: interface
+ for k,v in pairs(ctxdata.flags) do
+ environment.setargument(k,v)
+ end
+ end
local files = environment.files
if #files > 0 then
input.identify_cnf(instance)
diff --git a/scripts/context/lua/mtx-convert.lua b/scripts/context/lua/mtx-convert.lua
new file mode 100644
index 000000000..c9827c8b7
--- /dev/null
+++ b/scripts/context/lua/mtx-convert.lua
@@ -0,0 +1,86 @@
+if not modules then modules = { } end modules ['mtx-convert'] = {
+ version = 1.001,
+ comment = "companion to mtxrun.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+do
+
+ graphics = graphics or { }
+ graphics.converters = graphics.converters or { }
+
+ local gsprogram = (os.platform == "windows" and "gswin32c") or "gs"
+ local gstemplate = "%s -q -sDEVICE=pdfwrite -dEPSCrop -dNOPAUSE -dNOCACHE -dBATCH -dAutoRotatePages=/None -dProcessColorModel=/DeviceCMYK -sOutputFile=%s %s -c quit"
+
+ function graphics.converters.epstopdf(inputpath,outputpath,epsname)
+ inputpath = inputpath or "."
+ outputpath = outputpath or "."
+ local oldname = file.join(inputpath,epsname)
+ local newname = file.join(outputpath,file.replacesuffix(epsname,"pdf"))
+ local et = lfs.attributes(oldname,"modification")
+ local pt = lfs.attributes(newname,"modification")
+ if not pt or et > pt then
+ dir.mkdirs(outputpath)
+ local tmpname = file.replacesuffix(newname,"tmp")
+ local command = string.format(gstemplate,gsprogram,tmpname,oldname)
+ os.execute(command)
+ os.remove(newname)
+ os.rename(tmpname,newname)
+ end
+ end
+
+ function graphics.converters.convertpath(inputpath,outputpath)
+ for name in lfs.dir(inputpath or ".") do
+ if name:find("%.$") then
+ -- skip . and ..
+ elseif name:find("%.eps$") then
+ graphics.converters.epstopdf(inputpath,outputpath, name)
+ elseif lfs.attributes(inputpath .. "/".. name,"mode") == "directory" then
+ graphics.converters.convertpath(inputpath .. "/".. name,outputpath .. "/".. name)
+ end
+ end
+ end
+
+end
+
+texmf.instance = instance -- we need to get rid of this / maybe current instance in global table
+
+scripts = scripts or { }
+scripts.convert = scripts.convert or { }
+
+scripts.convert.delay = 5 * 60 -- 5 minutes
+
+function scripts.convert.convertall()
+ local watch = environment.arguments.watch or false
+ local delay = environment.arguments.delay or scripts.convert.delay
+ local input = environment.arguments.inputpath or "."
+ local output = environment.arguments.outputpath or "."
+ while true do
+ graphics.converters.convertpath(input, output)
+ if watch then
+ os.sleep(delay)
+ else
+ break
+ end
+ end
+end
+
+banner = banner .. " | graphic conversion tools "
+
+messages.help = [[
+--convertall convert all graphics on path
+--inputpath=string original graphics path
+--outputpath=string converted graphics path
+--watch watch folders
+--delay time between sweeps
+]]
+
+input.verbose = true
+
+if environment.argument("convertall") then
+ scripts.convert.convertall()
+else
+ input.help(banner,messages.help)
+end
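
The epstopdf converter only calls Ghostscript when the eps file is newer than the pdf, and
writes to a temporary name first so an interrupted run never leaves a broken pdf behind. A
stripped-down sketch of that freshness check (the file names are placeholders):

    local lfs = require("lfs")
    local function outofdate(source, target)
        local st = lfs.attributes(source, "modification")
        local tt = lfs.attributes(target, "modification")
        return st ~= nil and (tt == nil or st > tt)  -- a missing target counts as stale
    end
    if outofdate("figure.eps", "figure.pdf") then
        -- run the converter: write figure.tmp, then rename it to figure.pdf
    end
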
diff --git a/scripts/context/lua/mtx-fonts.lua b/scripts/context/lua/mtx-fonts.lua
index ba5215ab1..395e9764e 100644
--- a/scripts/context/lua/mtx-fonts.lua
+++ b/scripts/context/lua/mtx-fonts.lua
@@ -1,4 +1,11 @@
-dofile(input.find_file(instance,"luat-log.lua"))
+if not modules then modules = { } end modules ['mtx-fonts'] = {
+ version = 1.001,
+ comment = "companion to mtxrun.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
dofile(input.find_file(instance,"font-syn.lua"))
texmf.instance = instance -- we need to get rid of this / maybe current instance in global table
diff --git a/scripts/context/lua/mtx-watch.lua b/scripts/context/lua/mtx-watch.lua
new file mode 100644
index 000000000..651865ab4
--- /dev/null
+++ b/scripts/context/lua/mtx-watch.lua
@@ -0,0 +1,224 @@
+if not modules then modules = { } end modules ['mtx-watch'] = {
+ version = 1.001,
+ comment = "companion to mtxrun.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+texmf.instance = instance -- we need to get rid of this / maybe current instance in global table
+
+scripts = scripts or { }
+scripts.watch = scripts.watch or { }
+
+function scripts.watch.watch()
+ local delay = environment.argument("delay") or 5
+ local logpath = environment.argument("logpath") or ""
+ local pipe = environment.argument("pipe") or false
+ if #environment.files > 0 then
+ for _, path in ipairs(environment.files) do
+ logs.report("watch", "watching path ".. path)
+ end
+ local function glob(files,path)
+ for name in lfs.dir(path) do
+ if name:find("^%.") then
+ -- skip . and ..
+ else
+ name = path .. "/" .. name
+ local a = lfs.attributes(name)
+ if not a then
+ -- weird
+ elseif a.mode == "directory" then
+ if name:find("graphics$") or name:find("figures$") or name:find("resources$") then
+ -- skip these too
+ else
+ glob(files,name)
+ end
+ elseif name:find(".%luj$") then
+ files[name] = a.change or a.ctime or a.modification or a.mtime
+ end
+ end
+ end
+ end
+ local n = 0
+ local function process()
+ local done = false
+ for _, path in ipairs(environment.files) do
+ lfs.chdir(path)
+ local files = { }
+ glob(files,path)
+ table.sort(files) -- what gets sorted here
+ for name, time in pairs(files) do
+ --~ local ok, joblog = xpcall(function() return dofile(name) end, function() end )
+ local ok, joblog = pcall(dofile,name)
+ if ok and joblog then
+ if joblog.status == "processing" then
+ logs.report("watch",string.format("aborted job, %s added to queue",name))
+ joblog.status = "queued"
+ io.savedata(name, table.serialize(joblog,true))
+ elseif joblog.status == "queued" then
+ local command = joblog.command
+ if command then
+ local replacements = {
+ inputpath = (joblog.paths and joblog.paths.input ) or ".",
+ outputpath = (joblog.paths and joblog.paths.output) or ".",
+ filename = joblog.filename or "",
+ }
+ command = command:gsub("%%(.-)%%", replacements)
+ if command ~= "" then
+ joblog.status = "processing"
+ joblog.runtime = os.time() -- os.clock()
+ io.savedata(name, table.serialize(joblog,true))
+ logs.report("watch",string.format("running: %s", command))
+ local newpath = file.dirname(name)
+ io.flush()
+ local result = ""
+ if newpath ~= "" and newpath ~= "." then
+ local oldpath = lfs.currentdir()
+ lfs.chdir(newpath)
+ if pipe then result = os.resultof(command) else result = os.execute(command) end
+ lfs.chdir(oldpath)
+ else
+ if pipe then result = os.resultof(command) else result = os.execute(command) end
+ end
+ logs.report("watch",string.format("return value: %s", result))
+ done = true
+ local path, base = replacements.outputpath, file.basename(replacements.filename)
+ joblog.runtime = os.time() - joblog.runtime -- os.clock() - joblog.runtime
+ joblog.result = file.replacesuffix(file.join(path,base),"pdf")
+ joblog.size = lfs.attributes(joblog.result,"size")
+ joblog.status = "finished"
+ else
+ joblog.status = "invalid command"
+ end
+ else
+ joblog.status = "no command"
+ end
+ -- pcall, when error sleep + again
+ io.savedata(name, table.serialize(joblog,true))
+ if logpath ~= "" then
+ local name = string.format("%s/%s%04i%09i.lua", logpath, os.time(), math.floor((os.clock()*100)%1000), math.random(99999999))
+ io.savedata(name, table.serialize(joblog,true))
+ logs.report("watch", "saving joblog ".. name)
+ end
+ end
+ end
+ end
+ end
+ end
+ local function wait()
+ io.flush()
+ if not done then
+ n = n + 1
+ if n >= 10 then
+ logs.report("watch", "still sleeping " .. os.clock())
+ n = 0
+ end
+ os.sleep(delay)
+ end
+ end
+ while true do
+ pcall(process)
+ pcall(wait)
+ end
+ else
+ logs.report("watch", "no paths to watch")
+ end
+end
+
+function scripts.watch.collect_logs(path) -- clean 'm up too
+ path = path or environment.argument("logpath") or ""
+ path = (path == "" and ".") or path
+ local files = dir.globfiles(path,false,"^%d+%.lua$")
+ local collection = { }
+ local valid = table.tohash({"filename","result","runtime","size","status"})
+ for _, name in ipairs(files) do
+ local t = dofile(name)
+ if t and type(t) == "table" and t.status then
+ for k, v in pairs(t) do
+ if not valid[k] then
+ t[k] = nil
+ end
+ end
+ collection[name:gsub("[^%d]","")] = t
+ end
+ end
+ return collection
+end
+
+function scripts.watch.save_logs(collection,path) -- play safe
+ if collection and not table.is_empty(collection) then
+ path = path or environment.argument("logpath") or ""
+ path = (path == "" and ".") or path
+ local filename = string.format("%s/collected-%s.lua",path,tostring(os.time()))
+ io.savedata(filename,table.serialize(collection,true))
+ local check = dofile(filename)
+ for k,v in pairs(check) do
+ if not collection[k] then
+ logs.error("watch", "error in saving file")
+ os.remove(filename)
+ return false
+ end
+ end
+ for k,v in pairs(check) do
+ os.remove(string.format("%s.lua",k))
+ end
+ return true
+ else
+ return false
+ end
+end
+
+function scripts.watch.collect_collections(path) -- removes duplicates
+ path = path or environment.argument("logpath") or ""
+ path = (path == "" and ".") or path
+ local files = dir.globfiles(path,false,"^collected%-%d+%.lua$")
+ local collection = { }
+ for _, name in ipairs(files) do
+ local t = dofile(name)
+ if t and type(t) == "table" then
+ for k, v in pairs(t) do
+ collection[k] = v
+ end
+ end
+ end
+ return collection
+end
+
+function scripts.watch.show_logs(path) -- removes duplicates
+ local collection = scripts.watch.collect_collections(path) or { }
+ local max = 0
+ for k,v in pairs(collection) do
+ v = v.filename or "?"
+ if #v > max then max = #v end
+ end
+ print(max)
+ for k,v in ipairs(table.sortedkeys(collection)) do
+ local c = collection[v]
+ local f, s, r, n = c.filename or "?", c.status or "?", c.runtime or 0, c.size or 0
+ logs.report("watch", string.format("%s %s %3i %8i %s",string.padd(f,max," "),string.padd(s,10," "),r,n,v))
+ end
+end
+
+banner = banner .. " | watchdog"
+
+messages.help = [[
+--logpath optional path for log files
+--watch watch given path
+--pipe use pipe instead of execute
+--delay delay between sweeps
+--collect condense log files
+--showlog show log data
+]]
+
+input.verbose = true
+
+if environment.argument("watch") then
+ scripts.watch.watch()
+elseif environment.argument("collect") then
+ scripts.watch.save_logs(scripts.watch.collect_logs())
+elseif environment.argument("showlog") then
+ scripts.watch.show_logs()
+else
+ input.help(banner,messages.help)
+end
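
Each watched job file is expected to return a table (the joblog) whose command field
contains %key% placeholders; those are filled in with a single gsub against a replacement
table before the command is run. A small sketch of that substitution, with made-up job
values:

    local replacements = { inputpath = ".", outputpath = "out", filename = "test.tex" }
    local command = "context --result=%outputpath%/%filename% %inputpath%/%filename%"
    command = command:gsub("%%(.-)%%", replacements)
    print(command)  -- context --result=out/test.tex ./test.tex
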
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index baad28e84..d180fa9b9 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -1,5 +1,14 @@
#!/usr/bin/env texlua
+if not modules then modules = { } end modules ['mtxrun'] = {
+ version = 1.001,
+ comment = "runner, lua replacement for texmfstart.rb",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
-- one can make a stub:
--
-- #!/bin/sh
@@ -29,7 +38,7 @@
-- remember for subruns: _CTX_K_S_#{original}_
-- remember for subruns: TEXMFSTART.#{original} [tex.rb texmfstart.rb]
-banner = "version 1.0.1 - 2007+ - PRAGMA ADE / CONTEXT"
+banner = "version 1.0.2 - 2007+ - PRAGMA ADE / CONTEXT"
texlua = true
-- begin library merge
@@ -370,6 +379,49 @@ function string:split_settings() -- no {} handling, see l-aux for lpeg variant
end
+-- filename : l-lpeg.lua
+-- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
+-- copyright: PRAGMA ADE / ConTeXt Development Team
+-- license : see context related readme files
+
+if not versions then versions = { } end versions['l-lpeg'] = 1.001
+
+--~ l-lpeg.lua :
+
+--~ lpeg.digit = lpeg.R('09')^1
+--~ lpeg.sign = lpeg.S('+-')^1
+--~ lpeg.cardinal = lpeg.P(lpeg.sign^0 * lpeg.digit^1)
+--~ lpeg.integer = lpeg.P(lpeg.sign^0 * lpeg.digit^1)
+--~ lpeg.float = lpeg.P(lpeg.sign^0 * lpeg.digit^0 * lpeg.P('.') * lpeg.digit^1)
+--~ lpeg.number = lpeg.float + lpeg.integer
+--~ lpeg.oct = lpeg.P("0") * lpeg.R('07')^1
+--~ lpeg.hex = lpeg.P("0x") * (lpeg.R('09') + lpeg.R('AF'))^1
+--~ lpeg.uppercase = lpeg.P("AZ")
+--~ lpeg.lowercase = lpeg.P("az")
+
+--~ lpeg.eol = lpeg.S('\r\n\f')^1 -- includes formfeed
+--~ lpeg.space = lpeg.S(' ')^1
+--~ lpeg.nonspace = lpeg.P(1-lpeg.space)^1
+--~ lpeg.whitespace = lpeg.S(' \r\n\f\t')^1
+--~ lpeg.nonwhitespace = lpeg.P(1-lpeg.whitespace)^1
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return lpeg.P { lpeg.P(pattern) + 1 * lpeg.V(1) }
+end
+
+function lpeg.startswith(pattern) --slightly adapted
+ return lpeg.P(pattern)
+end
+
+--~ g = lpeg.splitter(" ",function(s) ... end) -- gmatch:lpeg = 3:2
+
+function lpeg.splitter(pattern, action)
+ return (((1-lpeg.P(pattern))^1)/action+1)^0
+end
+
+
+
+
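
The two helpers that are not commented out are used further on: lpeg.anywhere wraps a
pattern in a searching grammar (it later drives the 'weird filename' test in the path
scanner) and lpeg.splitter hands every separator-delimited piece to a callback. A short
usage sketch:

    local words = { }
    lpeg.splitter(" ", function(s) words[#words+1] = s end):match("one two three")
    -- words is now { "one", "two", "three" }
    print(lpeg.anywhere("##"):match("zip##archive"))  -- a position when found, nil otherwise
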
-- filename : l-table.lua
-- comment : split off from luat-lib
-- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
@@ -443,6 +495,7 @@ function table.merge(t, ...)
t[k] = v
end
end
+ return t
end
function table.merged(...)
@@ -455,6 +508,25 @@ function table.merged(...)
return tmp
end
+function table.imerge(t, ...)
+ for _, list in ipairs({...}) do
+ for k,v in ipairs(list) do
+ t[#t+1] = v
+ end
+ end
+ return t
+end
+
+function table.imerged(...)
+ local tmp = { }
+ for _, list in ipairs({...}) do
+ for _,v in pairs(list) do
+ tmp[#tmp+1] = v
+ end
+ end
+ return tmp
+end
+
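
table.imerge and table.imerged are the array counterparts of merge and merged: the first
appends in place and returns its first argument, the second builds a fresh list. Usage
sketch:

    local a = { 1, 2 }
    table.imerge(a, { 3, 4 }, { 5 })             -- a is now { 1, 2, 3, 4, 5 }
    local b = table.imerged({ "x" }, { "y" })    -- b is a new table { "x", "y" }
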
if not table.fastcopy then
function table.fastcopy(old) -- fast one
@@ -1101,6 +1173,38 @@ do
end
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(string.format(" [%s]",table.concat(options,"|")))
+ end
+ if default then
+ io.write(string.format(" [%s]",default))
+ end
+ io.write(string.format(" "))
+ local answer = io.read()
+ answer = answer:gsub("^%s*(.*)%s*$","%1")
+ if answer == "" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for _,v in pairs(options) do
+ if v == answer then
+ return answer
+ end
+ end
+ local pattern = "^" .. answer
+ for _,v in pairs(options) do
+ if v:find(pattern) then
+ return v
+ end
+ end
+ end
+ end
+end
+
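
io.ask keeps prompting until it gets something usable: an empty answer falls back to the
default, otherwise the reply must equal one of the options or be a prefix of one (the
first option that matches wins). A usage sketch, only meaningful when run interactively:

    local answer = io.ask("delete cache?", "no", { "yes", "no" })
    if answer == "yes" then
        -- go ahead
    end
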
-- filename : l-md5.lua
-- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
@@ -1117,7 +1221,7 @@ if md5 then do
if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
- if not md5.dec then function md5.dec(str) return convert(stt,"%03i") end end
+ if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
end end
@@ -1325,15 +1429,18 @@ dir = { }
if lfs then
function dir.glob_pattern(path,patt,recurse,action)
- for name in lfs.dir(path) do
- local full = path .. '/' .. name
- local mode = lfs.attributes(full,'mode')
- if mode == 'file' then
- if name:find(patt) then
- action(full)
+ local ok, scanner = xpcall(function() return lfs.dir(path) end, function() end) -- kepler safe
+ if ok and type(scanner) == "function" then
+ for name in scanner do
+ local full = path .. '/' .. name
+ local mode = lfs.attributes(full,'mode')
+ if mode == 'file' then
+ if name:find(patt) then
+ action(full)
+ end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ dir.glob_pattern(full,patt,recurse,action)
end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- dir.glob_pattern(full,patt,recurse,action)
end
end
end
@@ -1358,6 +1465,30 @@ if lfs then
return t
end
+ function dir.globfiles(path,recurse,func,files)
+ if type(func) == "string" then
+ local s = func -- alas, we need this indirect way
+ func = function(name) return name:find(s) end
+ end
+ files = files or { }
+ for name in lfs.dir(path) do
+ if name:find("^%.") then
+ --- skip
+ elseif lfs.attributes(name,'mode') == "directory" then
+ if recurse then
+ dir.globfiles(path .. "/" .. name,recurse,func,files)
+ end
+ elseif func then
+ if func(name) then
+ files[#files+1] = path .. "/" .. name
+ end
+ else
+ files[#files+1] = path .. "/" .. name
+ end
+ end
+ return files
+ end
+
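
dir.globfiles takes either a Lua string pattern or a predicate function as filter and
returns the matching path/name combinations. Two equivalent non-recursive calls:

    local luafiles = dir.globfiles(".", false, "%.lua$")
    local luafiles = dir.globfiles(".", false, function(name) return name:find("%.lua$") end)
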
-- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex")
-- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex")
-- t = dir.glob("c:/data/develop/context/texmf/**/*.tex")
@@ -1374,35 +1505,25 @@ if lfs then
--~ mkdirs(".","/a/b/c")
--~ mkdirs("a","b","c")
- function dir.mkdirs(...) -- root,... or ... ; root is not split
- local pth, err = "", false
- for k,v in pairs({...}) do
- if k == 1 then
- if not lfs.isdir(v) then
- -- print("no root path " .. v)
- err = true
- else
- pth = v
- end
- elseif lfs.isdir(pth .. "/" .. v) then
- pth = pth .. "/" .. v
+ function dir.mkdirs(...)
+ local pth, err, lst = "", false, table.concat({...},"/")
+ for _, s in ipairs(lst:split("/")) do
+ if pth == "" then
+ pth = (s == "" and "/") or s
else
- for _,s in pairs(v:split("/")) do
- pth = pth .. "/" .. s
- if not lfs.isdir(pth) then
- ok = lfs.mkdir(pth)
- if not lfs.isdir(pth) then
- err = true
- end
- end
- if err then break end
- end
+ pth = pth .. "/" .. s
+ end
+ if s == "" then
+ -- can be network path
+ elseif not lfs.isdir(pth) then
+ lfs.mkdir(pth)
end
- if err then break end
end
return pth, not err
end
+ dir.makedirs = dir.mkdirs
+
end
@@ -1512,7 +1633,8 @@ xml.xmlns = { }
do
- local parser = lpeg.P(false) -- printing shows that this has no side effects
+ local check = lpeg.P(false)
+ local parse = check
--[[ldx--
<p>The next function associates a namespace prefix with an <l n='url'/>. This
@@ -1524,7 +1646,8 @@ do
--ldx]]--
function xml.registerns(namespace, pattern) -- pattern can be an lpeg
- parser = parser + lpeg.C(lpeg.P(pattern:lower())) / namespace
+ check = check + lpeg.C(lpeg.P(pattern:lower())) / namespace
+ parse = lpeg.P { lpeg.P(check) + 1 * lpeg.V(1) }
end
--[[ldx--
@@ -1538,7 +1661,7 @@ do
--ldx]]--
function xml.checkns(namespace,url)
- local ns = parser:match(url:lower())
+ local ns = parse:match(url:lower())
if ns and namespace ~= ns then
xml.xmlns[namespace] = ns
end
@@ -1556,7 +1679,7 @@ do
--ldx]]--
function xml.resolvens(url)
- return parser:match(url:lower()) or ""
+ return parse:match(url:lower()) or ""
end
--[[ldx--
@@ -1607,11 +1730,15 @@ do
local mt = { __tostring = xml.text }
+ function xml.check_error(top,toclose)
+ return ""
+ end
+
local function add_attribute(namespace,tag,value)
if tag == "xmlns" then
xmlns[#xmlns+1] = xml.resolvens(value)
at[tag] = value
- elseif ns == "xmlns" then
+ elseif namespace == "xmlns" then
xml.checkns(tag,value)
at["xmlns:" .. tag] = value
else
@@ -1623,7 +1750,7 @@ do
dt[#dt+1] = spacing
end
local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
- top = { ns=namespace or "", nr=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
+ top = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
setmetatable(top, mt)
dt = top.dt
stack[#stack+1] = top
@@ -1636,9 +1763,9 @@ do
local toclose = remove(stack)
top = stack[#stack]
if #stack < 1 then
- errorstr = string.format("nothing to close with %s", tag)
+ errorstr = string.format("nothing to close with %s %s", tag, xml.check_error(top,toclose) or "")
elseif toclose.tg ~= tag then -- no namespace check
- errorstr = string.format("unable to close %s with %s", toclose.tg, tag)
+ errorstr = string.format("unable to close %s with %s %s", toclose.tg, tag, xml.check_error(top,toclose) or "")
end
dt = top.dt
dt[#dt+1] = toclose
@@ -1654,7 +1781,7 @@ do
top = stack[#stack]
setmetatable(top, mt)
dt = top.dt
- dt[#dt+1] = { ns=namespace or "", nr=resolved, tg=tag, at=at, dt={}, __p__ = top }
+ dt[#dt+1] = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top }
at = { }
if at.xmlns then
remove(xmlns)
@@ -1743,14 +1870,13 @@ do
-- text + comment + emptyelement + cdata + instruction + lpeg.V("parent"), -- 5.8
-- text + lpeg.V("parent") + emptyelement + comment + cdata + instruction, -- 5.5
-
local grammar = lpeg.P { "preamble",
preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * lpeg.V("parent") * trailer,
parent = beginelement * lpeg.V("children")^0 * endelement,
children = text + lpeg.V("parent") + emptyelement + comment + cdata + instruction,
}
- function xml.convert(data, no_root) -- no collapse any more
+ function xml.convert(data, no_root)
stack, top, at, xmlns, errorstr, result = {}, {}, {}, {}, nil, nil
stack[#stack+1] = top
top.dt = { }
@@ -1761,7 +1887,7 @@ do
errorstr = "invalid xml file"
end
if errorstr then
- result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={} } } }
+ result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={}, er = true } }, error = true }
setmetatable(stack, mt)
if xml.error_handler then xml.error_handler("load",errorstr) end
else
@@ -1785,6 +1911,10 @@ do
function. Maybe it will go away (when not used).</p>
--ldx]]--
+ function xml.is_valid(root)
+ return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er
+ end
+
function xml.package(tag,attributes,data)
local ns, tg = tag:match("^(.-):?([^:]+)$")
local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} }
@@ -1792,6 +1922,10 @@ do
return t
end
+ function xml.is_valid(root)
+ return root and not root.error
+ end
+
xml.error_handler = (logs and logs.report) or print
end
@@ -1804,16 +1938,18 @@ a filename or a file handle.</p>
function xml.load(filename)
if type(filename) == "string" then
- local root, f = { }, io.open(filename,'r')
+ local f = io.open(filename,'r')
if f then
- root = xml.convert(f:read("*all"))
+ local root = xml.convert(f:read("*all"))
f:close()
+ return root
else
- -- if we want an error: root = xml.convert("")
+ return xml.convert("")
end
- return root -- no nil but an empty table if it fails
- else
+ elseif filename then -- filehandle
return xml.convert(filename:read("*all"))
+ else
+ return xml.convert("")
end
end
@@ -1955,10 +2091,10 @@ do
else
if ats then
-- handle(format("<%s:%s %s/>",ens,etg,table.concat(ats," ")))
- handle("<%" .. ens .. ":" .. etg .. table.concat(ats," ") .. "/>")
+ handle("<" .. ens .. ":" .. etg .. table.concat(ats," ") .. "/>")
else
-- handle(format("<%s:%s/>",ens,etg))
- handle("<%" .. ens .. ":" .. "/>")
+ handle("<" .. ens .. ":" .. "/>")
end
end
else
@@ -2159,9 +2295,20 @@ do
[28] = "has value",
[29] = "fast match",
[30] = "select",
+ [31] = "expression",
[40] = "processing instruction",
}
+ local function make_expression(str)
+ str = str:gsub("@([a-zA-Z%-_]+)", "(a['%1'] or '')")
+ str = str:gsub("position%(%)", "i")
+ str = str:gsub("text%(%)", "t")
+ str = str:gsub("!=", "~=")
+ str = str:gsub("([^=!~<>])=([^=!~<>])", "%1==%2")
+ str = str:gsub("([a-zA-Z%-_]+)%(", "functions.%1(")
+ return str, loadstring(string.format("return function(functions,i,a,t) return %s end", str))()
+ end
+
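
make_expression rewrites the bracketed part of an lpath expression into Lua source and
compiles it with loadstring; at match time the compiled function is called with the helper
table, the element index, the attribute table and the element text. A standalone sketch of
the same rewriting (made-up input, Lua 5.1 loadstring assumed):

    local str = "@n=='03' or position()>2"
    str = str:gsub("@([a-zA-Z%-_]+)", "(a['%1'] or '')")
    str = str:gsub("position%(%)", "i")
    str = str:gsub("text%(%)", "t")
    -- str is now: (a['n'] or '')=='03' or i>2
    local f = loadstring("return function(functions,i,a,t) return " .. str .. " end")()
    print(f(nil, 3, { n = "03" }, "some text"))  -- true
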
local map = { }
local space = lpeg.S(' \r\n\t')
@@ -2182,7 +2329,7 @@ do
local bar = lpeg.P('|')
local hat = lpeg.P('^')
local valid = lpeg.R('az', 'AZ', '09') + lpeg.S('_-')
- local name_yes = lpeg.C(valid^1) * colon * lpeg.C(valid^1)
+ local name_yes = lpeg.C(valid^1) * colon * lpeg.C(valid^1 + star) -- permits ns:*
local name_nop = lpeg.C(lpeg.P(true)) * lpeg.C(valid^1)
local name = name_yes + name_nop
local number = lpeg.C((lpeg.S('+-')^0 * lpeg.R('09')^1)) / tonumber
@@ -2202,6 +2349,11 @@ do
local is_value = lbracket * value * rbracket
local is_number = lbracket * number * rbracket
+ local nobracket = 1-(lbracket+rbracket) -- must be improved
+ local is_expression = lbracket * lpeg.C(((lpeg.C(nobracket^1))/make_expression)) * rbracket
+
+ local is_expression = lbracket * (lpeg.C(nobracket^1))/make_expression * rbracket
+
local is_one = name
local is_none = exclam * name
local is_one_of = ((lparent * names * rparent) + morenames)
@@ -2237,6 +2389,9 @@ do
local position = (is_one * is_number ) / function(...) map[#map+1] = { 30, true, ... } end
local dont_position = (is_none * is_number ) / function(...) map[#map+1] = { 30, false, ... } end
+ local expression = (is_one * is_expression)/ function(...) map[#map+1] = { 31, true, ... } end
+ local dont_expression = (is_none * is_expression)/ function(...) map[#map+1] = { 31, false, ... } end
+
local instruction = (instructiontag * text ) / function(...) map[#map+1] = { 40, ... } end
local nothing = (empty ) / function( ) map[#map+1] = { 15 } end -- 15 ?
local crap = (1-slash)^1
@@ -2261,6 +2416,7 @@ do
match_one_of_and_eq + match_one_of_and_ne +
dont_match_and_eq + dont_match_and_ne +
match_and_eq + match_and_ne +
+ dont_expression + expression +
has_attribute + has_value +
dont_match_one_of + match_one_of +
dont_match + match +
@@ -2294,8 +2450,10 @@ do
-- root
return false
end
- elseif #map == 2 and m == 12 and map[2][1] == 20 then
- return { { 29, map[2][2], map[2][3] } }
+ elseif #map == 2 and m == 12 and map[2][1] == 20 then
+ -- return { { 29, map[2][2], map[2][3], map[2][4], map[2][5] } }
+ map[2][1] = 29
+ return { map[2] }
end
if m ~= 11 and m ~= 12 and m ~= 13 and m ~= 14 and m ~= 15 and m ~= 16 then
table.insert(map, 1, { 16 })
@@ -2355,6 +2513,20 @@ do
end
end
+ function xml.xshow(e,...) -- also handy when report is given, use () to isolate first e
+ local t = { ... }
+ local report = (type(t[#t]) == "function" and t[#t]) or fallbackreport
+ if not e then
+ report("<!-- no element -->\n")
+ elseif e.tg then
+ report(tostring(e) .. "\n")
+ else
+ for i=1,#e do
+ report(tostring(e[i]) .. "\n")
+ end
+ end
+ end
+
end
--[[ldx--
@@ -2372,8 +2544,22 @@ advance what we want to do with the found element the handle gets three argument
functions.</p>
--ldx]]--
+xml.functions = { }
+
do
+ local functions = xml.functions
+
+ functions.contains = string.find
+ functions.find = string.find
+ functions.upper = string.upper
+ functions.lower = string.lower
+ functions.number = tonumber
+ functions.boolean = toboolean
+ functions.oneof = function(s,...) -- slow
+ local t = {...} for i=1,#t do if s == t[i] then return true end end return false
+ end
+
function xml.traverse(root,pattern,handle,reverse,index,parent,wildcard)
if not root then -- error
return false
@@ -2402,8 +2588,10 @@ do
local rootdt = root.dt
for k=1,#rootdt do
local e = rootdt[k]
- local ns, tg = e.rn or e.ns, e.tg
- if ns == action[2] and tg == action[3] then
+ local ns, tg = (e.rn or e.ns), e.tg
+ local matched = ns == action[3] and tg == action[4]
+ if not action[2] then matched = not matched end
+ if matched then
if handle(root,rootdt,k) then return false end
end
end
@@ -2416,7 +2604,8 @@ do
end
else
if (command == 16 or command == 12) and index == 1 then -- initial
- wildcard = true
+--~ wildcard = true
+ wildcard = command == 16 -- ok?
index = index + 1
action = pattern[index]
command = action and action[1] or 0 -- something is wrong
@@ -2440,13 +2629,16 @@ do
elseif reverse and index == #pattern then
start, stop, step = stop, start, -1
end
+ local idx = 0
for k=start,stop,step do
local e = rootdt[k]
local ns, tg = e.rn or e.ns, e.tg
if tg then
+ idx = idx + 1
if command == 30 then
- local matched = ns == action[3] and tg == action[4]
- if action[2] then matched = not matched end
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
+ if not action[2] then matched = not matched end
if matched then
n = n + dn
if n == action[5] then
@@ -2463,46 +2655,58 @@ do
else
local matched, multiple = false, false
if command == 20 then -- match
- matched = ns == action[2] and tg == action[3]
- if action[2] then matched = not matched end
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
+ if not action[2] then matched = not matched end
elseif command == 21 then -- match one of
multiple = true
- for i=2,#action,2 do
+ for i=3,#action,2 do
if ns == action[i] and tg == action[i+1] then matched = true break end
end
- if action[2] then matched = not matched end
+ if not action[2] then matched = not matched end
elseif command == 22 then -- eq
- matched = ns == action[3] and tg == action[4]
- if action[2] then matched = not matched end
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
+ if not action[2] then matched = not matched end
matched = matched and e.at[action[6]] == action[7]
elseif command == 23 then -- ne
- matched = ns == action[3] and tg == action[4]
- if action[2] then matched = not matched end
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
+ if not action[2] then matched = not matched end
matched = matched and e.at[action[6]] ~= action[7]
elseif command == 24 then -- one of eq
multiple = true
for i=3,#action-2,2 do
if ns == action[i] and tg == action[i+1] then matched = true break end
end
- if action[2] then matched = not matched end
+ if not action[2] then matched = not matched end
matched = matched and e.at[action[#action-1]] == action[#action]
elseif command == 25 then -- one of ne
multiple = true
for i=3,#action-2,2 do
if ns == action[i] and tg == action[i+1] then matched = true break end
end
- if action[2] then matched = not matched end
+ if not action[2] then matched = not matched end
matched = matched and e.at[action[#action-1]] ~= action[#action]
elseif command == 27 then -- has attribute
- local ans = action[3]
- matched = ns == action[3] and tg == action[4]
- if action[2] then matched = not matched end
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
+ if not action[2] then matched = not matched end
matched = matched and e.at[action[5]]
elseif command == 28 then -- has value
local edt = e.dt
- matched = ns == action[3] and tg == action[4]
- if action[2] then matched = not matched end
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
+ if not action[2] then matched = not matched end
matched = matched and edt and edt[1] == action[5]
+ elseif command == 31 then
+ local edt = e.dt
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
+ if not action[2] then matched = not matched end
+ if matched then
+ matched = action[6](functions,idx,e.at,edt[1])
+ end
end
if matched then -- combine tg test and at test
if index == #pattern then
@@ -2943,28 +3147,33 @@ do
end
end
- function xml.include(xmldata,element,attribute,pathlist,collapse)
- element = element or 'ctx:include'
- attribute = attribute or 'name'
- pathlist = pathlist or { '.' }
- -- todo, check op ri
+ function xml.include(xmldata,pattern,attribute,recursive,findfile)
+ -- parse="text" (default: xml), encoding="" (todo)
+ pattern = pattern or 'include'
+ attribute = attribute or 'href'
local function include(r,d,k)
- local ek = d[k]
- local name = (ek.at and ek.at[attribute]) or ""
- if name ~= "" then
- -- maybe file lookup in tree
- local fullname
- for _, path in ipairs(pathlist) do
- if path == '.' then
- fullname = name
- else
- fullname = file.join(path,name)
- end
- local f = io.open(fullname)
+ local ek, name = d[k], nil
+ if ek.at then
+ for a in attribute:gmatch("([^|]+)") do
+ name = ek.at[a]
+ if name then break end
+ end
+ end
+ if name then
+ name = (findfile and findfile(name)) or name
+ if name ~= "" then
+ local f = io.open(name)
if f then
- xml.assign(d,k,xml.load(f,collapse))
+ if ek.at["parse"] == "text" then -- for the moment hard coded
+ d[k] = xml.escaped(f:read("*all"))
+ else
+ local xi = xml.load(f)
+ if recursive then
+ xml.include(xi,pattern,attribute,recursive,findfile)
+ end
+ xml.assign(d,k,xi)
+ end
f:close()
- break
else
xml.empty(d,k)
end
@@ -2973,7 +3182,7 @@ do
xml.empty(d,k)
end
end
- while xml.each_element(xmldata, element, include) do end
+ xml.each_element(xmldata, pattern, include)
end
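
xml.include now takes an lpath pattern, a |-separated list of candidate attributes, a
recursion flag and an optional file resolver, and a parse='text' attribute on the include
element switches to verbatim (escaped) inclusion. A usage sketch; the resolver shown is
only an illustration:

    local x = xml.load("product.xml")
    xml.include(x, "include", "href|name", true, function(name)
        return input.find_file(instance, name) or name
    end)
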
function xml.strip_whitespace(root, pattern)
@@ -3041,6 +3250,20 @@ do
end)
end
+ function xml.filters.found(root,pattern,check_content)
+ local found = false
+ traverse(root, lpath(pattern), function(r,d,k)
+ if check_content then
+ local dk = d and d[k]
+ found = dk and dk.dt and next(dk.dt) and true
+ else
+ found = true
+ end
+ return true
+ end)
+ return found
+ end
+
end
--[[ldx--
@@ -3054,6 +3277,7 @@ xml.index = xml.filters.index
xml.position = xml.filters.index
xml.first = xml.filters.first
xml.last = xml.filters.last
+xml.found = xml.filters.found
xml.each = xml.each_element
xml.process = xml.process_element
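
xml.found (the alias for xml.filters.found defined just above) reports whether a pattern
matches at all; with the third argument set it also requires the matched element to have
content. Usage sketch with an illustrative file and pattern:

    local x = xml.load("somefile.xml")
    if xml.found(x, "metadata/author", true) then
        -- at least one non-empty author element is present
    end
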
@@ -3102,12 +3326,46 @@ function xml.serialize_path(root,lpath,handle)
xml.serialize(dk,handle)
end
-xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
-xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end
+--~ xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
+--~ xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end
-function xml.escaped (str) return str:gsub("(.)" , xml.escapes ) end
-function xml.unescaped(str) return str:gsub("(&.-;)", xml.unescapes) end
-function xml.cleansed (str) return str:gsub("<.->" , '' ) end -- "%b<>"
+--~ function xml.escaped (str) return str:gsub("(.)" , xml.escapes ) end
+--~ function xml.unescaped(str) return str:gsub("(&.-;)", xml.unescapes) end
+--~ function xml.cleansed (str) return str:gsub("<.->" , '' ) end -- "%b<>"
+
+do
+
+ -- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg
+ --
+ -- 1021:0335:0287:0247
+
+ -- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ"
+ --
+ -- 1559:0257:0288:0190 (last one suggested by roberto)
+
+ -- escaped = lpeg.Cs((lpeg.S("<&>") / xml.escapes + 1)^0)
+ -- escaped = lpeg.Cs((lpeg.S("<")/"&lt;" + lpeg.S(">")/"&gt;" + lpeg.S("&")/"&amp;" + 1)^0)
+ local normal = (1 - lpeg.S("<&>"))^0
+ local special = lpeg.P("<")/"&lt;" + lpeg.P(">")/"&gt;" + lpeg.P("&")/"&amp;"
+ local escaped = lpeg.Cs(normal * (special * normal)^0)
+
+ -- 100 * 1000 * "oeps&lt; oeps&gt; oeps&amp;" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto)
+
+ -- unescaped = lpeg.Cs((lpeg.S("&lt;")/"<" + lpeg.S("&gt;")/">" + lpeg.S("&amp;")/"&" + 1)^0)
+ -- unescaped = lpeg.Cs((((lpeg.P("&")/"") * (lpeg.P("lt")/"<" + lpeg.P("gt")/">" + lpeg.P("amp")/"&") * (lpeg.P(";")/"")) + 1)^0)
+ local normal = (1 - lpeg.S"&")^0
+ local special = lpeg.P("&lt;")/"<" + lpeg.P("&gt;")/">" + lpeg.P("&amp;")/"&"
+ local unescaped = lpeg.Cs(normal * (special * normal)^0)
+
+ -- 100 * 5000 * "oeps <oeps bla='oeps' foo='bar'> oeps </oeps> oeps " : gsub:lpeg == 623:501 msec (short tags, less difference)
+
+ local cleansed = lpeg.Cs(((lpeg.P("<") * (1-lpeg.P(">"))^0 * lpeg.P(">"))/"" + 1)^0)
+
+ function xml.escaped (str) return escaped :match(str) end
+ function xml.unescaped(str) return unescaped:match(str) end
+ function xml.cleansed (str) return cleansed :match(str) end
+
+end
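
The gsub based escaping is replaced by precompiled lpeg substitutions; the timing comments
above record the gsub versus lpeg measurements that motivated the switch. The interface is
unchanged:

    print(xml.escaped("a < b & c"))            -- a &lt; b &amp; c
    print(xml.unescaped("a &lt; b &amp; c"))   -- a < b & c
    print(xml.cleansed("a <b>bold</b> c"))     -- a bold c
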
function xml.join(t,separator,lastseparator)
if #t > 0 then
@@ -3193,6 +3451,33 @@ end end
--~ xml.lshow("/../../../a/!(b|c)[@d='e']/f")
--~ xml.lshow("/../../../a/!b[@d!='e']/f")
+--~ x = xml.convert([[
+--~ <a>
+--~ <b n='01'>01</b>
+--~ <b n='02'>02</b>
+--~ <b n='03'>03</b>
+--~ <b n='04'>OK</b>
+--~ <b n='05'>05</b>
+--~ <b n='06'>06</b>
+--~ <b n='07'>ALSO OK</b>
+--~ </a>
+--~ ]])
+
+--~ xml.trace_lpath = true
+
+--~ xml.xshow(xml.first(x,"b[position() > 2 and position() < 5 and text() == 'ok']"))
+--~ xml.xshow(xml.first(x,"b[position() > 2 and position() < 5 and text() == upper('ok')]"))
+--~ xml.xshow(xml.first(x,"b[@n=='03' or @n=='08']"))
+--~ xml.xshow(xml.all (x,"b[number(@n)>2 and number(@n)<6]"))
+--~ xml.xshow(xml.first(x,"b[find(text(),'ALSO')]"))
+
+--~ str = [[
+--~ <?xml version="1.0" encoding="utf-8"?>
+--~ <story line='mojca'>
+--~ <windows>my secret</mouse>
+--~ </story>
+--~ ]]
+
-- filename : l-utils.lua
-- comment : split off from luat-lib
@@ -3500,6 +3785,7 @@ end
-- Beware, loading and saving is overloaded in luat-tmp!
-- todo: instances.[hashes,cnffiles,configurations,522] -> ipairs (alles check, sneller)
+-- todo: check escaping in find etc, too much, too slow
if not versions then versions = { } end versions['luat-inp'] = 1.001
if not environment then environment = { } end
@@ -3737,7 +4023,7 @@ input.settrace(tonumber(os.getenv("MTX.INPUT.TRACE") or os.getenv("MTX_INPUT_TRA
-- These functions can be used to test the performance, especially
-- loading the database files.
-function input.start_timing(instance)
+function input.starttiming(instance)
if instance then
instance.starttime = os.clock()
if not instance.loadtime then
@@ -3746,7 +4032,7 @@ function input.start_timing(instance)
end
end
-function input.stop_timing(instance, report)
+function input.stoptiming(instance, report)
if instance and instance.starttime then
instance.stoptime = os.clock()
local loadtime = instance.stoptime - instance.starttime
@@ -3760,9 +4046,6 @@ function input.stop_timing(instance, report)
end
end
-input.stoptiming = input.stop_timing
-input.starttiming = input.start_timing
-
function input.elapsedtime(instance)
return string.format("%0.3f",instance.loadtime or 0)
end
@@ -4075,99 +4358,106 @@ function input.generatedatabase(instance,specification)
return input.methodhandler('generators', instance, specification)
end
-function input.generators.tex(instance,specification)
- local tag = specification
- if not instance.lsrmode and lfs and lfs.dir then
- input.report("scanning path",specification)
- instance.files[tag] = { }
- local files = instance.files[tag]
- local n, m, r = 0, 0, 0
- local spec = specification .. '/'
- local attributes = lfs.attributes
- local directory = lfs.dir
- local small = instance.smallcache
- local function action(path)
- local mode, full
- if path then
- full = spec .. path .. '/'
- else
- full = spec
- end
- for name in directory(full) do
- if name:find("^%.") then
- -- skip
- elseif name:find("[%~%`%!%#%$%%%^%&%*%(%)%=%{%}%[%]%:%;\"\'%|%|%<%>%,%?\n\r\t]") then
- -- texio.write_nl("skipping " .. name)
- -- skip
+do
+
+ local weird = lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+
+ function input.generators.tex(instance,specification)
+ local tag = specification
+ if not instance.lsrmode and lfs and lfs.dir then
+ input.report("scanning path",specification)
+ instance.files[tag] = { }
+ local files = instance.files[tag]
+ local n, m, r = 0, 0, 0
+ local spec = specification .. '/'
+ local attributes = lfs.attributes
+ local directory = lfs.dir
+ local small = instance.smallcache
+ local function action(path)
+ local mode, full
+ if path then
+ full = spec .. path .. '/'
else
- mode = attributes(full..name,'mode')
- if mode == "directory" then
- m = m + 1
- if path then
- action(path..'/'..name)
- else
- action(name)
- end
- elseif path and mode == 'file' then
- n = n + 1
- local f = files[name]
- if f then
- if not small then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
+ full = spec
+ end
+ for name in directory(full) do
+ if name:find("^%.") then
+ -- skip
+ -- elseif name:find("[%~%`%!%#%$%%%^%&%*%(%)%=%{%}%[%]%:%;\"\'%|%<%>%,%?\n\r\t]") then -- too much escaped
+ elseif weird:match(name) then
+ -- texio.write_nl("skipping " .. name)
+ -- skip
+ else
+ mode = attributes(full..name,'mode')
+ if mode == "directory" then
+ m = m + 1
+ if path then
+ action(path..'/'..name)
+ else
+ action(name)
end
- else
- files[name] = path
- local lower = name:lower()
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
+ elseif path and mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if not small then
+ if type(f) == 'string' then
+ files[name] = { f, path }
+ else
+ f[#f+1] = path
+ end
+ end
+ else
+ files[name] = path
+ local lower = name:lower()
+ if name ~= lower then
+ files["remap:"..lower] = name
+ r = r + 1
+ end
end
end
end
end
end
- end
- action()
- input.report(string.format("%s files found on %s directories with %s uppercase remappings",n,m,r))
- else
- local fullname = file.join(specification,input.lsrname)
- local path = '.'
- local f = io.open(fullname)
- if f then
- instance.files[tag] = { }
- local files = instance.files[tag]
- local small = instance.smallcache
- input.report("loading lsr file",fullname)
- -- for line in f:lines() do -- much slower then the next one
- for line in (f:read("*a")):gmatch("(.-)\n") do
- if line:find("^[%a%d]") then
- local fl = files[line]
- if fl then
- if not small then
- if type(fl) == 'string' then
- files[line] = { fl, path } -- table
- else
- fl[#fl+1] = path
+ action()
+ input.report(string.format("%s files found on %s directories with %s uppercase remappings",n,m,r))
+ else
+ local fullname = file.join(specification,input.lsrname)
+ local path = '.'
+ local f = io.open(fullname)
+ if f then
+ instance.files[tag] = { }
+ local files = instance.files[tag]
+ local small = instance.smallcache
+ input.report("loading lsr file",fullname)
+ -- for line in f:lines() do -- much slower than the next one
+ for line in (f:read("*a")):gmatch("(.-)\n") do
+ if line:find("^[%a%d]") then
+ local fl = files[line]
+ if fl then
+ if not small then
+ if type(fl) == 'string' then
+ files[line] = { fl, path } -- table
+ else
+ fl[#fl+1] = path
+ end
+ end
+ else
+ files[line] = path -- string
+ local lower = line:lower()
+ if line ~= lower then
+ files["remap:"..lower] = line
end
end
else
- files[line] = path -- string
- local lower = line:lower()
- if line ~= lower then
- files["remap:"..lower] = line
- end
+ path = line:match("%.%/(.-)%:$") or path -- match could be nil due to empty line
end
- else
- path = line:match("%.%/(.-)%:$") or path -- match could be nil due to empty line
end
+ f:close()
end
- f:close()
end
end
+
end
-- savers, todo
@@ -4590,10 +4880,168 @@ end
-- a,b,c/{p,q,r}/d/{x,y,z}//
-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+
+-- this one is better and faster, but it took me a while to realize
+-- that this kind of replacement is cleaner than messy parsing and
+-- fuzzy concatenating we can probably gain a bit with selectively
+-- applying lpeg, but experiments with lpeg parsing this proved not to
+-- work that well; the parsing is ok, but dealing with the resulting
+-- table is a pain because we need to work inside-out recursively
+
+--~ function input.aux.splitpathexpr(str, t, validate)
+--~ -- no need for optimization, only called a few times, we can use lpeg for the sub
+--~ t = t or { }
+--~ while true do
+--~ local done = false
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("([^{},]+){([^{}]-)}", function(a,b)
+--~ local t = { }
+--~ for s in b:gmatch("([^,]+)") do
+--~ t[#t+1] = a .. s
+--~ end
+--~ ok, done = true, true
+--~ return "{" .. table.concat(t,",") .. "}"
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("{([^{}]-)}([^{},]+)", function(a,b)
+--~ local t = { }
+--~ for s in a:gmatch("([^,]+)") do
+--~ t[#t+1] = s .. b
+--~ end
+--~ ok, done = true, true
+--~ return "{" .. table.concat(t,",") .. "}"
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("([,{]){([^{}]+)}([,}])", function(a,b,c)
+--~ ok, done = true, true
+--~ return a .. b .. c
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ if not done then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("{([^{}]-)}{([^{}]-)}", function(a,b)
+--~ local t = { }
+--~ for sa in a:gmatch("([^,]+)") do
+--~ for sb in b:gmatch("([^,]+)") do
+--~ t[#t+1] = sa .. sb
+--~ end
+--~ end
+--~ ok = true
+--~ return "{" .. table.concat(t,",") .. "}"
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("{([^{}]-)}", function(a)
+--~ ok = true
+--~ return a
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ if validate then
+--~ for s in str:gmatch("([^,]+)") do
+--~ s = validate(s)
+--~ if s then t[#t+1] = s end
+--~ end
+--~ else
+--~ for s in str:gmatch("([^,]+)") do
+--~ t[#t+1] = s
+--~ end
+--~ end
+--~ return t
+--~ end
+
+function input.aux.splitpathexpr(str, t, validate)
+ -- no need for optimization, only called a few times, we can use lpeg for the sub
+ t = t or { }
+ local concat = table.concat
+ while true do
+ local done = false
+ while true do
+ ok = false
+ str = str:gsub("([^{},]+){([^{}]-)}", function(a,b)
+ local t = { }
+ b:piecewise(",", function(s) t[#t+1] = a .. s end)
+ ok, done = true, true
+ return "{" .. concat(t,",") .. "}"
+ end)
+ if not ok then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("{([^{}]-)}([^{},]+)", function(a,b)
+ local t = { }
+ a:piecewise(",", function(s) t[#t+1] = s .. b end)
+ ok, done = true, true
+ return "{" .. concat(t,",") .. "}"
+ end)
+ if not ok then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("([,{]){([^{}]+)}([,}])", function(a,b,c)
+ ok, done = true, true
+ return a .. b .. c
+ end)
+ if not ok then break end
+ end
+ if not done then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("{([^{}]-)}{([^{}]-)}", function(a,b)
+ local t = { }
+ a:piecewise(",", function(sa)
+ b:piecewise(",", function(sb)
+ t[#t+1] = sa .. sb
+ end)
+ end)
+ ok = true
+ return "{" .. concat(t,",") .. "}"
+ end)
+ if not ok then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("{([^{}]-)}", function(a)
+ ok = true
+ return a
+ end)
+ if not ok then break end
+ end
+ if validate then
+ str:piecewise(",", function(s)
+ s = validate(s)
+ if s then t[#t+1] = s end
+ end)
+ else
+ str:piecewise(",", function(s)
+ t[#t+1] = s
+ end)
+ end
+ return t
+end
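
The new splitpathexpr expands brace expressions by repeatedly rewriting the string itself
instead of building nested tables (string.piecewise from the l-string helpers is assumed
to be loaded). An expansion sketch:

    local t = input.aux.splitpathexpr("a/{b,c/{x,y}}/d", { })
    -- t is { "a/b/d", "a/c/x/d", "a/c/y/d" }
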
function input.aux.expanded_path(instance,pathlist)
-- a previous version fed back into pathlist
- local i, n, oldlist, newlist, ok = 0, 0, { }, { }, false
+ local newlist, ok = { }, false
for _,v in ipairs(pathlist) do
if v:find("[{}]") then
ok = true
@@ -4601,45 +5049,11 @@ function input.aux.expanded_path(instance,pathlist)
end
end
if ok then
- for _,v in ipairs(pathlist) do
- oldlist[#oldlist+1] = (v:gsub("([\{\}])", function(p)
- if p == "{" then
- i = i + 1
- if i > n then n = i end
- return "<" .. (i-1) .. ">"
- else
- i = i - 1
- return "</" .. i .. ">"
- end
- end))
- end
- for i=1,n do
- while true do
- local more = false
- local pattern = "^(.-)<"..(n-i)..">(.-)</"..(n-i)..">(.-)$"
- local t = { }
- for _,v in ipairs(oldlist) do
- local pre, mid, post = v:match(pattern)
- if pre and mid and post then
- more = true
- for vv in string.gmatch(mid..',',"(.-),") do
- if vv == '.' then
- t[#t+1] = pre..post
- else
- t[#t+1] = pre..vv..post
- end
- end
- else
- t[#t+1] = v
- end
- end
- oldlist = t
- if not more then break end
- end
- end
- for _,v in ipairs(oldlist) do
- v = file.collapse_path(v)
- if v ~= "" and not v:find(instance.dummy_path_expr) then newlist[#newlist+1] = v end
+ for _, v in ipairs(pathlist) do
+ input.aux.splitpathexpr(v, newlist, function(s)
+ s = file.collapse_path(s)
+ return s ~= "" and not s:find(instance.dummy_path_expr) and s
+ end)
end
else
for _,v in ipairs(pathlist) do
@@ -4652,6 +5066,83 @@ function input.aux.expanded_path(instance,pathlist)
return newlist
end
+--~ old one, imperfect and not that efficient
+--~
+--~ function input.aux.expanded_path(instance,pathlist)
+--~ -- a previous version fed back into pathlist
+--~ local i, n, oldlist, newlist, ok = 0, 0, { }, { }, false
+--~ for _,v in ipairs(pathlist) do
+--~ if v:find("[{}]") then
+--~ ok = true
+--~ break
+--~ end
+--~ end
+--~ if ok then
+--~ for _,v in ipairs(pathlist) do
+--~ oldlist[#oldlist+1] = (v:gsub("([\{\}])", function(p)
+--~ if p == "{" then
+--~ i = i + 1
+--~ if i > n then n = i end
+--~ return "<" .. (i-1) .. ">"
+--~ else
+--~ i = i - 1
+--~ return "</" .. i .. ">"
+--~ end
+--~ end))
+--~ end
+--~ for i=1,n do
+--~ while true do
+--~ local more = false
+--~ local pattern = "^(.-)<"..(n-i)..">(.-)</"..(n-i)..">(.-)$"
+--~ local t = { }
+--~ for _,v in ipairs(oldlist) do
+--~ local pre, mid, post = v:match(pattern)
+--~ if pre and mid and post then
+--~ more = true
+--~ for vv in string.gmatch(mid..',',"(.-),") do -- (mid, "([^,]+)")
+--~ if vv == '.' then
+--~ t[#t+1] = pre..post
+--~ else
+--~ t[#t+1] = pre..vv..post
+--~ end
+--~ end
+--~ else
+--~ t[#t+1] = v
+--~ end
+--~ end
+--~ oldlist = t
+--~ if not more then break end
+--~ end
+--~ end
+--~ if true then
+--~ -- many dups are possible due to messy resolve / order can be messed up too, brr !
+--~ local ok = { }
+--~ for _,o in ipairs(oldlist) do
+--~ for v in o:gmatch("([^,]+)") do
+--~ if not ok[v] then
+--~ ok[v] = true
+--~ v = file.collapse_path(v)
+--~ if v ~= "" and not v:find(instance.dummy_path_expr) then newlist[#newlist+1] = v end
+--~ end
+--~ end
+--~ end
+--~ else
+--~ for _,v in ipairs(oldlist) do
+--~ v = file.collapse_path(v)
+--~ if v ~= "" and not v:find(instance.dummy_path_expr) then newlist[#newlist+1] = v end
+--~ end
+--~ end
+--~ else
+--~ for _,v in ipairs(pathlist) do
+--~ for vv in string.gmatch(v..',',"(.-),") do
+--~ vv = file.collapse_path(v)
+--~ if vv ~= "" then newlist[#newlist+1] = vv end
+--~ end
+--~ end
+--~ end
+--~ return newlist
+--~ end
+
--~ function input.is_readable(name) -- brrr, get rid of this
--~ return name:find("^zip##") or file.is_readable(name)
--~ end
@@ -4750,24 +5241,51 @@ function input.suffixes_of_format(str)
end
end
-function input.aux.qualified_path(filename) -- make platform dependent / not good yet
- return
- filename:find("^%.+/") or
- filename:find("^/") or
- filename:find("^%a+%:") or
- filename:find("^%a+##")
-end
+--~ function input.aux.qualified_path(filename) -- make platform dependent / not good yet
+--~ return
+--~ filename:find("^%.+/") or
+--~ filename:find("^/") or
+--~ filename:find("^%a+%:") or
+--~ filename:find("^%a+##")
+--~ end
+
+--~ function input.normalize_name(original)
+--~ -- internally we use type##spec##subspec ; this hackery slightly slows down searching
+--~ local str = original or ""
+--~ str = str:gsub("::", "##") -- :: -> ##
+--~ str = str:gsub("^(%a+)://" ,"%1##") -- zip:// -> zip##
+--~ str = str:gsub("(.+)##(.+)##/(.+)","%1##%2##%3") -- ##/spec -> ##spec
+--~ if (input.trace>1) and (original ~= str) then
+--~ input.logger('= normalizer',original.." -> "..str)
+--~ end
+--~ return str
+--~ end
+
+do -- called about 700 times for an empty doc (font initializations etc)
+ -- i need to weed the font files for redundant calls
+
+ local letter = lpeg.R("az","AZ")
+ local separator = lpeg.P("##")
+
+ local qualified = lpeg.P(".")^0 * lpeg.P("/") + letter*lpeg.P(":") + letter^1*separator
+ local normalized = lpeg.Cs(
+ (letter^1*(lpeg.P("://")/"##") * (1-lpeg.P(false))^1) +
+ (lpeg.P("::")/"##" + (1-separator)^1*separator*(1-separator)^1*separator*(lpeg.P("/")/"") + 1)^0
+ )
+
+ -- ./name ../name /name c: zip## (todo: use url internally and get rid of ##)
+ function input.aux.qualified_path(filename)
+ return qualified:match(filename)
+ end
-function input.normalize_name(original)
- -- internally we use type##spec##subspec ; this hackery slightly slows down searching
- local str = original or ""
- str = str:gsub("::", "##") -- :: -> ##
- str = str:gsub("^(%a+)://" ,"%1##") -- zip:// -> zip##
- str = str:gsub("(.+)##(.+)##/(.+)","%1##%2##%3") -- ##/spec -> ##spec
- if (input.trace>1) and (original ~= str) then
- input.logger('= normalizer',original.." -> "..str)
+ -- zip:// -> zip## ; :: -> ## ; aa##bb##/cc -> aa##bb##cc
+ function input.normalize_name(original)
+ local str = normalized:match(original or "")
+ if input.trace > 1 and original ~= str then
+ input.logger('= normalizer',original.." -> "..str)
+ end
+ return str
end
- return str
end
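+
+-- a couple of illustrative (not executed) expectations, matching the comments above:
+--
+--~ input.normalize_name("zip://texmf.zip") -- "zip##texmf.zip"
+--~ input.normalize_name("aa##bb##/cc")     -- "aa##bb##cc"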
-- split the next one up, better for jit
@@ -5132,13 +5650,13 @@ function input.automount(instance)
end
function input.load(instance)
- input.start_timing(instance)
+ input.starttiming(instance)
input.identify_cnf(instance)
input.load_cnf(instance)
input.expand_variables(instance)
input.load_hash(instance)
input.automount(instance)
- input.stop_timing(instance)
+ input.stoptiming(instance)
end
function input.for_files(instance, command, files, filetype, mustexist)
@@ -5432,7 +5950,7 @@ being written at the same time is small. We also need to extend
luatools with a recache feature.</p>
--ldx]]--
-caches = caches or { }
+caches = caches or { }
dir = dir or { }
texmf = texmf or { }
@@ -5444,9 +5962,20 @@ caches.trace = false
caches.tree = false
caches.temp = caches.temp or os.getenv("TEXMFCACHE") or os.getenv("HOME") or os.getenv("HOMEPATH") or os.getenv("VARTEXMF") or os.getenv("TEXMFVAR") or os.getenv("TMP") or os.getenv("TEMP") or os.getenv("TMPDIR") or nil
caches.paths = caches.paths or { caches.temp }
+caches.force = false
+input.usecache = not toboolean(os.getenv("TEXMFSHARECACHE") or "false",true) -- true
+
+if caches.temp and caches.temp ~= "" and lfs.attributes(caches.temp,"mode") ~= "directory" then
+ if caches.force or io.ask(string.format("Should I create the cache path %s?",caches.temp), "no", { "yes", "no" }) == "yes" then
+ lfs.mkdirs(caches.temp)
+ end
+end
if not caches.temp or caches.temp == "" then
- print("\nFATAL ERROR: NO VALID TEMPORARY PATH\n")
+ print("\nfatal error: there is no valid cache path defined\n")
+ os.exit()
+elseif lfs.attributes(caches.temp,"mode") ~= "directory" then
+ print(string.format("\nfatal error: cache path %s is not a directory\n",caches.temp))
os.exit()
end
@@ -5633,8 +6162,6 @@ end
-- since we want to use the cache instead of the tree, we will now
-- reimplement the saver.
-input.usecache = true
-
function input.aux.save_data(instance, dataname, check)
for cachename, files in pairs(instance[dataname]) do
local name
@@ -5868,12 +6395,143 @@ if lua.bytecode then -- from 0 upwards
end
+if not modules then modules = { } end modules ['luat-log'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+<p>This is a prelude to a more extensive logging module. In addition to the
+standard logging we will provide an <l n='xml'/> structured file so that log
+files can be parsed more easily. Actually, any logging that is hooked into
+callbacks will be \XML\ by default.</p>
+--ldx]]--
+
+input = input or { }
+logs = logs or { }
+
+--[[ldx--
+<p>This looks pretty ugly but we need to speed things up a bit.</p>
+--ldx]]--
+
+logs.levels = {
+ ['error'] = 1,
+ ['warning'] = 2,
+ ['info'] = 3,
+ ['debug'] = 4
+}
+
+logs.functions = {
+ 'error', 'warning', 'info', 'debug', 'report',
+ 'start', 'stop', 'push', 'pop'
+}
+
+logs.callbacks = {
+ 'start_page_number',
+ 'stop_page_number',
+ 'report_output_pages',
+ 'report_output_log'
+}
+
+logs.xml = logs.xml or { }
+logs.tex = logs.tex or { }
+
+logs.level = 0
+
+do
+ local write_nl, write, format = texio.write_nl or print, texio.write or io.write, string.format
+
+ if texlua then
+ write_nl = print
+ write = io.write
+ end
+
+ function logs.xml.debug(category,str)
+ if logs.level > 3 then write_nl(format("<d category='%s'>%s</d>",category,str)) end
+ end
+ function logs.xml.info(category,str)
+ if logs.level > 2 then write_nl(format("<i category='%s'>%s</i>",category,str)) end
+ end
+ function logs.xml.warning(category,str)
+ if logs.level > 1 then write_nl(format("<w category='%s'>%s</w>",category,str)) end
+ end
+ function logs.xml.error(category,str)
+ if logs.level > 0 then write_nl(format("<e category='%s'>%s</e>",category,str)) end
+ end
+ function logs.xml.report(category,str)
+ write_nl(format("<r category='%s'>%s</r>",category,str))
+ end
+
+    function logs.xml.start() if logs.level > 0 then write_nl("<%s>" ) end end
+    function logs.xml.stop () if logs.level > 0 then write_nl("</%s>") end end
+    function logs.xml.push () if logs.level > 0 then write_nl("<!-- ") end end
+    function logs.xml.pop  () if logs.level > 0 then write_nl(" -->" ) end end
+
+ function logs.tex.debug(category,str)
+ if logs.level > 3 then write_nl(format("debug >> %s: %s" ,category,str)) end
+ end
+ function logs.tex.info(category,str)
+ if logs.level > 2 then write_nl(format("info >> %s: %s" ,category,str)) end
+ end
+ function logs.tex.warning(category,str)
+ if logs.level > 1 then write_nl(format("warning >> %s: %s",category,str)) end
+ end
+ function logs.tex.error(category,str)
+ if logs.level > 0 then write_nl(format("error >> %s: %s" ,category,str)) end
+ end
+ function logs.tex.report(category,str)
+ write_nl(format("report >> %s: %s" ,category,str))
+ end
+
+ function logs.set_level(level)
+ logs.level = logs.levels[level] or level
+ end
+
+ function logs.set_method(method)
+ for _, v in pairs(logs.functions) do
+ logs[v] = logs[method][v] or function() end
+ end
+        if callback and logs[method] then
+            for _, cb in pairs(logs.callbacks) do
+                callback.register(cb, logs[method][cb])
+ end
+ end
+ end
+
+ function logs.xml.start_page_number()
+ write_nl(format("<p real='%s' page='%s' sub='%s'", tex.count[0], tex.count[1], tex.count[2]))
+ end
+
+ function logs.xml.stop_page_number()
+ write("/>")
+ write_nl("")
+ end
+
+ function logs.xml.report_output_pages(p,b)
+ write_nl(format("<v k='pages' v='%s'/>", p))
+ write_nl(format("<v k='bytes' v='%s'/>", b))
+ write_nl("")
+ end
+
+ function logs.xml.report_output_log()
+ end
+
+end
+
+logs.set_level('error')
+logs.set_method('tex')
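+
+-- illustrative usage only (the defaults above stay 'tex' and 'error'); switching the
+-- method and level changes how the shared entry points behave:
+--
+--~ logs.set_method('xml')
+--~ logs.set_level('info')
+--~ logs.report('fonts',"initialization done") -- <r category='fonts'>initialization done</r>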
+
+
-- end library merge
own = { }
own.libs = { -- todo: check which ones are really needed
'l-string.lua',
+ 'l-lpeg.lua',
'l-table.lua',
'l-io.lua',
'l-md5.lua',
@@ -5892,6 +6550,7 @@ own.libs = { -- todo: check which ones are really needed
-- 'luat-tex.lua',
-- 'luat-kps.lua',
'luat-tmp.lua',
+ 'luat-log.lua',
}
-- We need this hack till luatex is fixed.
@@ -5953,22 +6612,32 @@ instance.lsrmode = environment.argument("lsr") or false
-- use os.env or environment when available
-function os.setenv(key,value)
- -- todo
-end
+--~ function input.check_environment(tree)
+--~ input.report('')
+--~ os.setenv('TMP', os.getenv('TMP') or os.getenv('TEMP') or os.getenv('TMPDIR') or os.getenv('HOME'))
+--~ if os.platform == 'linux' then
+--~ os.setenv('TEXOS', os.getenv('TEXOS') or 'texmf-linux')
+--~ elseif os.platform == 'windows' then
+--~ os.setenv('TEXOS', os.getenv('TEXOS') or 'texmf-windows')
+--~ elseif os.platform == 'macosx' then
+--~ os.setenv('TEXOS', os.getenv('TEXOS') or 'texmf-macosx')
+--~ end
+--~ os.setenv('TEXOS', string.gsub(string.gsub(os.getenv('TEXOS'),"^[\\\/]*", ''),"[\\\/]*$", ''))
+--~ os.setenv('TEXPATH', string.gsub(tree,"\/+$",''))
+--~ os.setenv('TEXMFOS', os.getenv('TEXPATH') .. "/" .. os.getenv('TEXOS'))
+--~ input.report('')
+--~ input.report("preset : TEXPATH => " .. os.getenv('TEXPATH'))
+--~ input.report("preset : TEXOS => " .. os.getenv('TEXOS'))
+--~ input.report("preset : TEXMFOS => " .. os.getenv('TEXMFOS'))
+--~ input.report("preset : TMP => " .. os.getenv('TMP'))
+--~ input.report('')
+--~ end
function input.check_environment(tree)
input.report('')
os.setenv('TMP', os.getenv('TMP') or os.getenv('TEMP') or os.getenv('TMPDIR') or os.getenv('HOME'))
- if os.platform == 'linux' then
- os.setenv('TEXOS', os.getenv('TEXOS') or 'texmf-linux')
- elseif os.platform == 'windows' then
- os.setenv('TEXOS', os.getenv('TEXOS') or 'texmf-windows')
- elseif os.platform == 'macosx' then
- os.setenv('TEXOS', os.getenv('TEXOS') or 'texmf-macosx')
- end
- os.setenv('TEXOS', string.gsub(string.gsub(os.getenv('TEXOS'),"^[\\\/]*", ''),"[\\\/]*$", ''))
- os.setenv('TEXPATH', string.gsub(tree,"\/+$",''))
+ os.setenv('TEXOS', os.getenv('TEXOS') or ("texmf-" .. os.currentplatform()))
+ os.setenv('TEXPATH', (tree or "tex"):gsub("\/+$",''))
os.setenv('TEXMFOS', os.getenv('TEXPATH') .. "/" .. os.getenv('TEXOS'))
input.report('')
input.report("preset : TEXPATH => " .. os.getenv('TEXPATH'))
@@ -5985,23 +6654,25 @@ function input.load_environment(name) -- todo: key=value as well as lua
if line:find("^[%%%#]") then
-- skip comment
else
- local key, how, value = line:match("^(.-)%s*([%<%=%>%?]+)%s*(.*)%s*$")
- value = value:gsub("^%%(.+)%%$", function(v) return os.getenv(v) or "" end)
- if how == "=" or how == "<<" then
- os.setenv(key,value)
- elseif how == "?" or how == "??" then
- os.setenv(key,os.getenv(key) or value)
- elseif how == "<" or how == "+=" then
- if os.getenv(key) then
- os.setenv(key,os.getenv(key) .. io.fileseparator .. value)
- else
- os.setenv(key,value)
- end
- elseif how == ">" or how == "=+" then
- if os.getenv(key) then
- os.setenv(key,value .. io.pathseparator .. os.getenv(key))
- else
- os.setenv(key,value)
+ local key, how, value = line:match("^(.-)%s*([<=>%?]+)%s*(.*)%s*$")
+ if how then
+ value = value:gsub("%%(.-)%%", function(v) return os.getenv(v) or "" end)
+ if how == "=" or how == "<<" then
+ os.setenv(key,value)
+ elseif how == "?" or how == "??" then
+ os.setenv(key,os.getenv(key) or value)
+ elseif how == "<" or how == "+=" then
+ if os.getenv(key) then
+ os.setenv(key,os.getenv(key) .. io.fileseparator .. value)
+ else
+ os.setenv(key,value)
+ end
+ elseif how == ">" or how == "=+" then
+ if os.getenv(key) then
+ os.setenv(key,value .. io.pathseparator .. os.getenv(key))
+ else
+ os.setenv(key,value)
+ end
end
end
end
@@ -6013,7 +6684,7 @@ end
function input.load_tree(tree)
if tree and tree ~= "" then
local setuptex = 'setuptex.tmf'
- if lfs.attributes(tree, mode) == "directory" then -- check if not nil
+ if lfs.attributes(tree, "mode") == "directory" then -- check if not nil
setuptex = tree .. "/" .. setuptex
else
setuptex = tree
@@ -6094,6 +6765,26 @@ function file.savechecksum(name, checksum)
return nil
end
+function os.currentplatform()
+ local currentplatform = "linux"
+ if os.platform == "windows" then
+ currentplatform = "mswin"
+ else
+ local architecture = os.resultof("uname -m")
+ local unixvariant = os.resultof("uname -s")
+ if architecture and architecture:find("x86_64") then
+ currentplatform = "linux-64"
+ elseif unixvariant and unixvariant:find("Darwin") then
+ if architecture and architecture:find("i386") then
+ currentplatform = "osx-intel"
+ else
+ currentplatform = "osx-ppc"
+ end
+ end
+ end
+ return currentplatform
+end
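+
+-- for illustration: with the probes above this resolves to one of "mswin", "linux",
+-- "linux-64", "osx-intel" or "osx-ppc", with "linux" as the fallback
+--
+--~ print(os.currentplatform())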
+
-- it starts here
input.runners = { }
@@ -6401,6 +7092,27 @@ function input.runners.edit_script(instance,filename)
end
end
+function input.runners.save_script_session(filename, list)
+ local t = { }
+ for _, key in ipairs(list) do
+ t[key] = environment.arguments[key]
+ end
+ io.savedata(filename,table.serialize(t,true))
+end
+
+function input.runners.load_script_session(filename)
+    if lfs.isfile(filename) then
+        local t = io.loaddata(filename)
+        if t then
+            t = loadstring(t)
+            t = t and t() -- guard against a malformed session file
+            if t then
+                for key, value in pairs(t) do
+                    environment.arguments[key] = value
+                end
+            end
+        end
+    end
+end
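+
+-- a hypothetical round trip (names made up for illustration): a script that fills
+-- input.runners.save_list gets its arguments saved to and restored from a cfg file
+--
+--~ input.runners.save_script_session("mtx-example.cfg", { "interface", "bodyfont" })
+--~ input.runners.load_script_session("mtx-example.cfg")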
+
input.runners.launchers = {
windows = { },
unix = { }
@@ -6448,6 +7160,15 @@ function input.runners.launch_file(instance,filename)
end
function input.runners.execute_ctx_script(instance,filename)
+ local function found(name)
+ local path = file.dirname(name)
+ if path and path ~= "" then
+ return false
+ else
+ local fullname = own and own.path and file.join(own.path,name)
+            return fullname and io.exists(fullname) and fullname
+ end
+ end
local before, after = environment.split_arguments(filename)
local suffix = ""
if not filename:find("%.lua$") then suffix = ".lua" end
@@ -6458,17 +7179,17 @@ function input.runners.execute_ctx_script(instance,filename)
-- mtx-<filename>
if not fullname or fullname == "" then
fullname = "mtx-" .. filename .. suffix
- fullname = input.find_file(instance,fullname)
+ fullname = found(fullname) and input.find_file(instance,fullname)
end
-- mtx-<filename>s
if not fullname or fullname == "" then
fullname = "mtx-" .. filename .. "s" .. suffix
- fullname = input.find_file(instance,fullname)
+ fullname = found(fullname) and input.find_file(instance,fullname)
end
-- mtx-<filename minus trailing s>
if not fullname or fullname == "" then
fullname = "mtx-" .. filename:gsub("s$","") .. suffix
- fullname = input.find_file(instance,fullname)
+ fullname = found(fullname) and input.find_file(instance,fullname)
end
-- that should do it
if fullname and fullname ~= "" then
@@ -6480,8 +7201,23 @@ function input.runners.execute_ctx_script(instance,filename)
elseif state == "run" then
arg = { } for _,v in pairs(after) do arg[#arg+1] = v end
environment.initialize_arguments(arg)
+            local loadname = environment.arguments['load']
+            if loadname then
+                if type(loadname) ~= "string" then loadname = file.basename(fullname) end
+                loadname = file.replacesuffix(loadname,"cfg")
+                input.runners.load_script_session(loadname)
+            end
filename = environment.files[1]
+ if input.verbose then
+ input.report("using script: " .. fullname)
+ end
dofile(fullname)
+            local savename = environment.arguments['save']
+            if savename and input.runners.save_list and not table.is_empty(input.runners.save_list or { }) then
+                if type(savename) ~= "string" then savename = file.basename(fullname) end
+                savename = file.replacesuffix(savename,"cfg")
+                input.runners.save_script_session(savename, input.runners.save_list)
+            end
return true
end
else
diff --git a/scripts/context/lua/scite-ctx.lua b/scripts/context/lua/scite-ctx.lua
deleted file mode 100644
index 82f8599b1..000000000
--- a/scripts/context/lua/scite-ctx.lua
+++ /dev/null
@@ -1,924 +0,0 @@
--- version : 1.0.0 - 07/2005
--- author : Hans Hagen - PRAGMA ADE - www.pragma-ade.com
--- copyright : public domain or whatever suits
--- remark : part of the context distribution
-
--- todo: name space for local functions
-
--- loading: scite-ctx.properties
-
--- # environment variable
--- #
--- # CTXSPELLPATH=t:/spell
--- #
--- # auto language detection
--- #
--- # % version =1.0 language=uk
--- # <?xml version='1.0' language='uk' ?>
-
--- ext.lua.startup.script=$(SciteDefaultHome)/scite-ctx.lua
---
--- # extension.$(file.patterns.context)=scite-ctx.lua
--- # extension.$(file.patterns.example)=scite-ctx.lua
---
--- # ext.lua.reset=1
--- # ext.lua.auto.reload=1
--- # ext.lua.startup.script=t:/lua/scite-ctx.lua
---
--- ctx.menulist.default=\
--- wrap=wrap_text|\
--- unwrap=unwrap_text|\
--- sort=sort_text|\
--- document=document_text|\
--- quote=quote_text|\
--- compound=compound_text|\
--- check=check_text
---
--- ctx.spellcheck.language=auto
--- ctx.spellcheck.wordsize=4
--- ctx.spellcheck.wordpath=ENV(CTXSPELLPATH)
---
--- ctx.spellcheck.wordfile.all=spell-uk.txt,spell-nl.txt
---
--- ctx.spellcheck.wordfile.uk=spell-uk.txt
--- ctx.spellcheck.wordfile.nl=spell-nl.txt
--- ctx.spellcheck.wordsize.uk=4
--- ctx.spellcheck.wordsize.nl=4
---
--- command.name.21.*=CTX Action List
--- command.subsystem.21.*=3
--- command.21.*=show_menu $(ctx.menulist.default)
--- command.groupundo.21.*=yes
--- command.shortcut.21.*=Shift+F11
---
--- command.name.22.*=CTX Check Text
--- command.subsystem.22.*=3
--- command.22.*=check_text
--- command.groupundo.22.*=yes
--- command.shortcut.22.*=Ctrl+L
---
--- command.name.23.*=CTX Wrap Text
--- command.subsystem.23.*=3
--- command.23.*=wrap_text
--- command.groupundo.23.*=yes
--- command.shortcut.23.*=Ctrl+M
---
--- # command.21.*=check_text
--- # command.21.*=dofile e:\context\lua\scite-ctx.lua
-
--- generic functions
-
-local crlf = "\n"
-
-function traceln(str)
- trace(str .. crlf)
- io.flush()
-end
-
-table.len = table.getn
-table.join = table.concat
-
-function table.found(tab, str)
- local l, r, p
- if string.len(str) == 0 then
- return false
- else
- l, r = 1, table.len(tab)
- while l <= r do
- p = math.floor((l+r)/2)
- if str < tab[p] then
- r = p - 1
- elseif str > tab[p] then
- l = p + 1
- else
- return true
- end
- end
- return false
- end
-end
-
-function string.grab(str, delimiter)
- local list = {}
- for snippet in string.gfind(str,delimiter) do
- table.insert(list, snippet)
- end
- return list
-end
-
-function string.join(list, delimiter)
- local size, str = table.len(list), ''
- if size > 0 then
- str = list[1]
- for i = 2, size, 1 do
- str = str .. delimiter .. list[i]
- end
- end
- return str
-end
-
-function string.spacy(str)
- if string.find(str,"^%s*$") then
- return true
- else
- return false
- end
-end
-
-function string.alphacmp(a,b,i) -- slow but ok
- if i and i > 0 then
- return string.lower(string.gsub(string.sub(a,i),'0',' ')) < string.lower(string.gsub(string.sub(b,i),'0',' '))
- else
- return string.lower(a) < string.lower(b)
- end
-end
-
-function table.alphasort(list,i)
- table.sort(list, function(a,b) return string.alphacmp(a,b,i) end)
-end
-
-function io.exists(filename)
- local ok, result, message = pcall(io.open,filename)
- if result then
- io.close(result)
- return true
- else
- return false
- end
-end
-
-function os.envvar(str)
- if os.getenv(str) ~= '' then
- return os.getenv(str)
- elseif os.getenv(string.upper(str)) ~= '' then
- return os.getenv(string.upper(str))
- elseif os.getenv(string.lower(str)) ~= '' then
- return os.getenv(string.lower(str))
- else
- return ''
- end
-end
-
-function string.expand(str)
- return string.gsub(str, "ENV%((%w+)%)", os.envvar)
-end
-
-function string.strip(str)
- return string.gsub(string.gsub(str,"^%s+",''),"%s+$",'')
-end
-
-function string.replace(original,pattern,replacement)
- local str = string.gsub(original,pattern,replacement)
--- print(str) -- indirect, since else str + nofsubs
- return str -- indirect, since else str + nofsubs
-end
-
--- support functions, maybe editor namespace
-
--- function column_of_position(position)
--- local line = editor:LineFromPosition(position)
--- local oldposition = editor.CurrentPos
--- local column = 0
--- editor:GotoPos(position)
--- while editor.CurrentPos ~= 0 and line == editor:LineFromPosition(editor.CurrentPos) do
--- editor:CharLeft()
--- column = column + 1
--- end
--- editor:GotoPos(oldposition)
--- if line > 0 then
--- return column -1
--- else
--- return column
--- end
--- end
-
--- function line_of_position(position)
--- return editor:LineFromPosition(position)
--- end
-
-function extend_to_start()
- local selectionstart = editor.SelectionStart
- local selectionend = editor.SelectionEnd
- local line = editor:LineFromPosition(selectionstart)
- if line > 0 then
- while line == editor:LineFromPosition(selectionstart-1) do
- selectionstart = selectionstart - 1
- editor:SetSel(selectionstart,selectionend)
- end
- else
- selectionstart = 0
- end
- editor:SetSel(selectionstart,selectionend)
- return selectionstart
-end
-
-function extend_to_end() -- editor:LineEndExtend() does not work
- local selectionstart = editor.SelectionStart
- local selectionend = editor.SelectionEnd
- local line = editor:LineFromPosition(selectionend)
- while line == editor:LineFromPosition(selectionend+1) do
- selectionend = selectionend + 1
- editor:SetSel(selectionstart,selectionend)
- end
- editor:SetSel(selectionstart,selectionend)
- return selectionend
-end
-
-function getfiletype()
- local firstline = editor:GetLine(0)
- if editor.Lexer == SCLEX_TEX then
- return 'tex'
- elseif editor.Lexer == SCLEX_XML then
- return 'xml'
- elseif string.find(firstline,"^%%") then
- return 'tex'
- elseif string.find(firstline,"^<%?xml") then
- return 'xml'
- else
- return 'unknown'
- end
-end
-
--- inspired by LuaExt's scite_Files
-
-function get_dir_list(mask)
- local f
- if props['PLAT_GTK'] and props['PLAT_GTK'] ~= "" then
- f = io.popen('ls -1 ' .. mask)
- else
- mask = string.gsub(mask, '/','\\')
- local tmpfile = 'scite-ctx.tmp'
- local cmd = 'dir /b "' .. mask .. '" > ' .. tmpfile
- os.execute(cmd)
- f = io.open(tmpfile)
- end
- local files = {}
- if not f then -- path check added
- return files
- end
- for line in f:lines() do
- table.insert(files, line)
- end
- f:close()
- return files
-end
-
--- banner
-
-print("loading scite-ctx.lua definition file")
-print("")
-print("- see scite-ctx.properties for configuring info")
-print("")
-print("- ctx.spellcheck.wordpath set to " .. props['ctx.spellcheck.wordpath'])
-if string.find(string.lower(props['ctx.spellcheck.wordpath']), "ctxspellpath") then
- if os.getenv('ctxspellpath') then
- print("- ctxspellpath set to " .. os.getenv('CTXSPELLPATH'))
- else
- print("- 'ctxspellpath is not set")
- end
- print("- ctx.spellcheck.wordpath expands to " .. string.expand(props['ctx.spellcheck.wordpath']))
-end
-print("")
-print("- ctx.wraptext.length is set to " .. props['ctx.wraptext.length'])
-if props['ctx.helpinfo'] ~= '' then
- print("- key bindings:")
- print("")
- print(string.replace(string.strip(props['ctx.helpinfo']),"%s*\|%s*","\n")) -- indirect, since else str + nofsubs
-end
-print("")
-print("- recognized first lines:")
-print("")
-print("xml <?xml version='1.0' language='nl'")
-print("tex % language=nl")
-
-
--- text functions
-
--- written while listening to Talk Talk
-
-local magicstring = string.rep("<ctx-crlf/>", 2)
-
-function wrap_text()
-
- -- We always go to the end of a line, so in fact some of
- -- the variables set next are not needed.
-
- local length = props["ctx.wraptext.length"]
-
- if length == '' then length = 80 else length = tonumber(length) end
-
- local startposition = editor.SelectionStart
- local endposition = editor.SelectionEnd
-
- if startposition == endposition then return end
-
- editor:LineEndExtend()
-
- startposition = editor.SelectionStart
- endposition = editor.SelectionEnd
-
- -- local startline = line_of_position(startposition)
- -- local endline = line_of_position(endposition)
- -- local startcolumn = column_of_position(startposition)
- -- local endcolumn = column_of_position(endposition)
- --
- -- editor:SetSel(startposition,endposition)
-
- local startline = props['SelectionStartLine']
- local endline = props['SelectionEndLine']
- local startcolumn = props['SelectionStartColumn'] - 1
- local endcolumn = props['SelectionEndColumn'] - 1
-
- local indentation = string.rep(' ', startcolumn)
- local selection = string.gsub(editor:GetSelText(),"[\n\r][\n\r]", "\n")
- local selection = string.gsub(selection,"\n\n+", ' ' .. magicstring .. ' ')
- local replacement = ''
- local templine = ''
-
- selection = string.gsub(selection,"^%s", '')
-
- for snippet in string.gfind(selection, "%S+") do
- if snippet == magicstring then
- replacement = replacement .. templine .. "\n\n"
- templine = ''
- elseif string.len(templine) + string.len(snippet) > length then
- replacement = replacement .. templine .. "\n"
- templine = indentation .. snippet
- elseif string.len(templine) == 0 then
- templine = indentation .. snippet
- else
- templine = string.len(templine) .. ' ' .. snippet
- end
- end
-
- replacement = replacement .. templine
- replacement = string.gsub(replacement, "^%s+", '')
-
- if endcolumn == 0 then
- replacement = replacement .. "\n"
- end
-
- editor:ReplaceSel(replacement)
-
-end
-
-function unwrap_text()
-
- local startposition = editor.SelectionStart
- local endposition = editor.SelectionEnd
-
- if startposition == endposition then return end
-
- editor:HomeExtend()
- editor:LineEndExtend()
-
- startposition = editor.SelectionStart
- endposition = editor.SelectionEnd
-
- local magicstring = string.rep("<multiplelines/>", 2)
- local selection = string.gsub(editor:GetSelText(),"[\n\r][\n\r]+", ' ' .. magicstring .. ' ')
- local replacement = ''
-
- for snippet in string.gfind(selection, "%S+") do
- if snippet == magicstring then
- replacement = replacement .. "\n"
- else
- replacement = replacement .. snippet .. "\n"
- end
- end
-
- if endcolumn == 0 then replacement = replacement .. "\n" end
-
- editor:ReplaceSel(replacement)
-
-end
-
-function sort_text()
-
- local startposition = editor.SelectionStart
- local endposition = editor.SelectionEnd
-
- if startposition == endposition then return end
-
- -- local startcolumn = column_of_position(startposition)
- -- local endcolumn = column_of_position(endposition)
- --
- -- editor:SetSel(startposition,endposition)
-
- local startline = props['SelectionStartLine']
- local endline = props['SelectionEndLine']
- local startcolumn = props['SelectionStartColumn'] - 1
- local endcolumn = props['SelectionEndColumn'] - 1
-
- startposition = extend_to_start()
- endposition = extend_to_end()
-
- local selection = string.gsub(editor:GetSelText(), "%s*$", '')
-
- list = string.grab(selection,"[^\n\r]+")
- table.alphasort(list, startcolumn)
- local replacement = table.concat(list, "\n")
-
- editor:GotoPos(startposition)
- editor:SetSel(startposition,endposition)
-
- if endcolumn == 0 then replacement = replacement .. "\n" end
-
- editor:ReplaceSel(replacement)
-
-end
-
-function document_text()
-
- local startposition = editor.SelectionStart
- local endposition = editor.SelectionEnd
-
- if startposition == endposition then return end
-
- startposition = extend_to_start()
- endposition = extend_to_end()
-
- editor:SetSel(startposition,endposition)
-
- local filetype = getfiletype()
-
- local replacement = ''
- for i = editor:LineFromPosition(startposition), editor:LineFromPosition(endposition) do
- local str = editor:GetLine(i)
- if filetype == 'xml' then
- if string.find(str,"^<%!%-%- .* %-%->%s*$") then
- replacement = replacement .. string.gsub(str,"^<%!%-%- (.*) %-%->(%s*)$", "%1\n")
- elseif not string.spacy(str) then
- replacement = replacement .. '<!-- ' .. string.gsub(str,"(%s*)$", '') .. " -->\n"
- else
- replacement = replacement .. str
- end
- else
- if string.find(str,"^%%D%s+$") then
- replacement = replacement .. "\n"
- elseif string.find(str,"^%%D ") then
- replacement = replacement .. string.gsub(str,"^%%D ", '')
- else
- replacement = replacement .. '%D ' .. str
- end
- end
- end
-
- editor:ReplaceSel(string.gsub(replacement, "[\n\r]$", ''))
-
-end
-
-function quote_text()
-
- local filetype, leftquotation, rightquotation = getfiletype(), '', ''
-
- if filetype == 'xml' then
- leftquotation, rightquotation = "<quotation>", "</quotation>"
- leftquote, rightquote = "<quotation>", "</quote>"
- else
- leftquotation, rightquotation = "\\quotation {", "}"
- leftquote, rightquote = "\\quote {", "}"
- end
-
- local replacement = editor:GetSelText()
- replacement = string.gsub(replacement, "\`\`(.-)\'\'", leftquotation .. "%1" .. rightquotation)
- replacement = string.gsub(replacement, "\"(.-)\"", leftquotation .. "%1" .. rightquotation)
- replacement = string.gsub(replacement, "\`(.-)\'", leftquote .. "%1" .. rightquote )
- replacement = string.gsub(replacement, "\'(.-)\'", leftquote .. "%1" .. rightquote )
- editor:ReplaceSel(replacement)
-
-end
-
-function compound_text()
-
- local filetype = getfiletype()
-
- if filetype == 'xml' then
- editor:ReplaceSel(string.gsub(editor:GetSelText(),"(>[^<%-][^<%-]+)([-\/])(%w%w+)","%1<compound token='%2'/>%3"))
- else
- editor:ReplaceSel(string.gsub(editor:GetSelText(),"([^\|])([-\/]+)([^\|])","%1|%2|%3"))
- end
-
-end
-
--- written while listening to Alanis Morissette's acoustic
--- Jagged Little Pill and Tori Amos' Beekeeper after
--- reinstalling on my good old ATH-7
-
-local language = props["ctx.spellcheck.language"]
-local wordsize = props["ctx.spellcheck.wordsize"]
-local wordpath = props["ctx.spellcheck.wordpath"]
-
-if language == '' then language = 'uk' end
-if wordsize == '' then wordsize = 4 else wordsize = tonumber(wordsize) end
-
-local wordfile = ""
-local wordlist = {}
-local worddone = 0
-
--- we use wordlist as a hash so that we can add entries without the
--- need to sort and also use a fast (built in) search
-
--- function kpsewhich_file(filename,filetype,progname)
--- local progflag, typeflag = '', ''
--- local tempname = os.tmpname()
--- if progname then
--- progflag = " --progname=" .. progname .. " "
--- end
--- if filetype then
--- typeflag = " --format=" .. filetype .. " "
--- end
--- local command = "kpsewhich" .. progflag .. typeflag .. " " .. filename .. " > " .. tempname
--- os.execute(command)
--- for line in io.lines(tempname) do
--- return string.gsub(line, "\s*$", '')
--- end
--- end
-
-function check_text()
-
- local dlanguage = props["ctx.spellcheck.language"]
- local dwordsize = props["ctx.spellcheck.wordsize"]
- local dwordpath = props["ctx.spellcheck.wordpath"]
-
- if dlanguage ~= '' then dlanguage = tostring(language) end
- if dwordsize ~= '' then dwordsize = tonumber(wordsize) end
-
- local firstline, skipfirst = editor:GetLine(0), false
- local filetype, wordskip, wordgood = getfiletype(), '', ''
-
- if filetype == 'tex' then
- wordskip = "\\"
- elseif filetype == 'xml' then
- wordskip = "<"
- wordgood = ">"
- end
-
- if props["ctx.spellcheck.language"] == 'auto' then
- if filetype == 'tex' then
- -- % version =1.0 language=uk
- firstline = string.gsub(firstline, "^%%%s*", '')
- firstline = string.gsub(firstline, "%s*$", '')
- for key, val in string.gfind(firstline,"(%w+)=(%w+)") do
- if key == "language" then
- language = val
- traceln("auto document language " .. "'" .. language .. "' (tex)")
- end
- end
- skipfirst = true
- elseif filetype == 'xml' then
- -- <?xml version='1.0' language='uk' ?>
- firstline = string.gsub(firstline, "^%<%?xml%s*", '')
- firstline = string.gsub(firstline, "%s*%?%>%s*$", '')
- for key, val in string.gfind(firstline,"(%w+)=[\"\'](.-)[\"\']") do
- if key == "language" then
- language = val
- traceln("auto document language " .. "'" .. language .. "' (xml)")
- end
- end
- skipfirst = true
- end
- end
-
- local fname = props["ctx.spellcheck.wordfile." .. language]
- local fsize = props["ctx.spellcheck.wordsize." .. language]
-
- if fsize ~= '' then wordsize = tonumber(fsize) end
-
- if fname ~= '' and fname ~= wordfile then
- wordfile, worddone, wordlist = fname, 0, {}
- for filename in string.gfind(wordfile,"[^%,]+") do
- if wordpath ~= '' then
- filename = string.expand(wordpath) .. '/' .. filename
- end
- if io.exists(filename) then
- traceln("loading " .. filename)
- for line in io.lines(filename) do
- if not string.find(line,"^[\%\#\-]") then
- str = string.gsub(line,"%s*$", '')
- rawset(wordlist,str,true) -- table.insert(wordlist,str)
- worddone = worddone + 1
- end
- end
- else
- traceln("unknown file '" .. filename .."'")
- end
- end
- traceln(worddone .. " words loaded")
- end
-
- reset_text()
-
- if worddone == 0 then
- traceln("no (valid) language or wordfile specified")
- else
- traceln("start checking")
- if wordskip ~= '' then
- traceln("ignoring " .. wordskip .. "..." .. wordgood)
- end
- local i, j, lastpos, startpos, endpos, snippet, len, first = 0, 0, -1, 0, 0, '', 0, 0
- local ok, skip, ch = false, false, ''
- if skipfirst then first = string.len(firstline) end
- for k = first, editor.TextLength do
- ch = editor:textrange(k,k+1)
- if wordgood ~= '' and ch == wordgood then
- skip = false
- elseif ch == wordskip then
- skip = true
- end
- if string.find(ch,"%w") and not string.find(ch,"%d") then
- if not skip then
- if ok then
- endpos = k
- else
- startpos = k
- endpos = k
- ok = true
- end
- end
- elseif ok and not skip then
- len = endpos - startpos + 1
- if len >= wordsize then
- snippet = editor:textrange(startpos,endpos+1)
- i = i + 1
- if wordlist[snippet] or wordlist[string.lower(snippet)] then -- table.found(wordlist,snippet)
- j = j + 1
- else
- editor:StartStyling(startpos,INDICS_MASK)
- editor:SetStyling(len,INDIC2_MASK) -- INDIC0_MASK+2
- end
- end
- ok = false
- elseif wordgood == '' then
- skip = (ch == wordskip)
- end
- end
- traceln(i .. " words checked, " .. (i-j) .. " errors")
- end
-
-end
-
-function reset_text()
- editor:StartStyling(0,INDICS_MASK)
- editor:SetStyling(editor.TextLength,INDIC_PLAIN)
-end
-
--- menu
-
-local menuactions = {}
-local menufunctions = {}
-
-function UserListShow(menutrigger, menulist)
- local menuentries = {}
- local list = string.grab(menulist,"[^%|]+")
- menuactions = {}
- for i=1, table.len(list) do
- if list[i] ~= '' then
- for key, val in string.gfind(list[i],"%s*(.+)=(.+)%s*") do
- table.insert(menuentries,key)
- rawset(menuactions,key,val)
- end
- end
- end
- local menustring = table.join(menuentries,'|')
- if menustring == "" then
- traceln("There are no templates defined for this file type.")
- else
- editor.AutoCSeparator = string.byte('|')
- editor:UserListShow(menutrigger,menustring)
- editor.AutoCSeparator = string.byte(' ')
- end
-end
-
-function OnUserListSelection(trigger,choice)
- if menufunctions[trigger] and menuactions[choice] then
- return menufunctions[trigger](menuactions[choice])
- else
- return false
- end
-end
-
--- main menu
-
-local menutrigger = 12
-
-function show_menu(menulist)
- UserListShow(menutrigger, menulist)
-end
-
-function process_menu(action)
- if not string.find(action,"%(%)$") then
- assert(loadstring(action .. "()"))()
- else
- assert(loadstring(action))()
- end
-end
-
-menufunctions[12] = process_menu
-
--- templates
-
-local templatetrigger = 13
-
--- local ctx_template_paths = { "./ctx-templates", "../ctx-templates", "../../ctx-templates" }
--- local ctx_auto_templates = false
--- local ctx_template_list = ""
--- local ctx_dir_list = { }
--- local ctx_dir_name = "./ctx-templates"
-
--- local ctx_path_list = {}
--- local ctx_path_done = {}
-
--- function ctx_list_loaded()
--- return ctx_dir_list and table.getn(ctx_dir_list) > 0
--- end
-
--- function insert_template(templatelist)
--- if props["ctx.template.scan"] == "yes" then
--- local current = props["FileDir"] .. "+" .. props["FileExt"] -- no name
--- local rescan = props["ctx.template.rescan"] == "yes"
--- local suffix = props["ctx.template.suffix."..props["FileExt"]] -- alas, no suffix expansion here
--- if rescan then
--- print("re-scanning enabled")
--- end
--- if current ~= ctx_file_path then
--- rescan = true
--- ctx_file_path = current
--- ctx_file_done = false
--- ctx_template_list = ""
--- end
--- if not ctx_file_done or rescan then
--- local pattern = "*.*"
--- for i, pathname in ipairs(ctx_template_paths) do
--- print("scanning " .. pathname .. " for " .. pattern)
--- ctx_dir_name = pathname
--- ctx_dir_list = get_dir_list(pathname .. "/" .. pattern)
--- if ctx_list_loaded() then
--- break
--- end
--- end
--- ctx_file_done = true
--- end
--- if ctx_list_loaded() then
--- ctx_template_list = ""
--- local pattern = "%." .. suffix .. "$"
--- for j, filename in ipairs(ctx_dir_list) do
--- if string.find(filename,pattern) then
--- local menuname = string.gsub(filename,"%..-$","")
--- if ctx_template_list ~= "" then
--- ctx_template_list = ctx_template_list .. "|"
--- end
--- ctx_template_list = ctx_template_list .. menuname .. "=" .. ctx_dir_name .. "/" .. filename
--- end
--- end
--- else
--- print("no template files found")
--- end
--- if ctx_template_list == "" then
--- ctx_auto_templates = false
--- print("no file related templates found")
--- else
--- ctx_auto_templates = true
--- templatelist = ctx_template_list
--- end
--- end
--- if templatelist ~= "" then
--- UserListShow(templatetrigger, templatelist)
--- end
--- end
-
-local ctx_template_paths = { "./ctx-templates", "../ctx-templates", "../../ctx-templates" }
-local ctx_auto_templates = false
-local ctx_template_list = ""
-
-local ctx_path_list = {}
-local ctx_path_done = {}
-local ctx_path_name = {}
-
-function ctx_list_loaded(path)
- return ctx_path_list[path] and table.getn(ctx_path_list[path]) > 0
-end
-
-function insert_template(templatelist)
- if props["ctx.template.scan"] == "yes" then
- local path = props["FileDir"]
- local rescan = props["ctx.template.rescan"] == "yes"
- local suffix = props["ctx.template.suffix." .. props["FileExt"]] -- alas, no suffix expansion here
- local current = path .. "+" .. props["FileExt"]
- if rescan then
- print("re-scanning enabled")
- end
- ctx_template_list = ""
- if not ctx_path_done[path] or rescan then
- local pattern = "*.*"
- for i, pathname in ipairs(ctx_template_paths) do
- print("scanning " .. string.gsub(path,"\\","/") .. "/" .. pathname)
- ctx_path_name[path] = pathname
- ctx_path_list[path] = get_dir_list(pathname .. "/" .. pattern)
- if ctx_list_loaded(path) then
- print("finished locating template files")
- break
- end
- end
- if ctx_list_loaded(path) then
- print(table.getn(ctx_path_list[path]) .. " template files found")
- else
- print("no template files found")
- end
- end
- if ctx_list_loaded(path) then
- ctx_template_list = ""
- local pattern = "%." .. suffix .. "$"
- local n = 0
- for j, filename in ipairs(ctx_path_list[path]) do
- if string.find(filename,pattern) then
- n = n + 1
- local menuname = string.gsub(filename,"%..-$","")
- if ctx_template_list ~= "" then
- ctx_template_list = ctx_template_list .. "|"
- end
- ctx_template_list = ctx_template_list .. menuname .. "=" .. ctx_path_name[path] .. "/" .. filename
- end
- end
- if not ctx_path_done[path] then
- print(n .. " suitable template files found")
- end
- end
- ctx_path_done[path] = true
- if ctx_template_list == "" then
- ctx_auto_templates = false
- else
- ctx_auto_templates = true
- templatelist = ctx_template_list
- end
- else
- ctx_auto_templates = false
- end
- if templatelist ~= "" then
- UserListShow(templatetrigger, templatelist)
- end
-end
-
-
--- ctx.template.[whatever].[filetype]
--- ctx.template.[whatever].data.[filetype]
--- ctx.template.[whatever].file.[filetype]
--- ctx.template.[whatever].list.[filetype]
-
-function process_template_one(action)
- local text = nil
- if ctx_auto_templates then
- local f = io.open(action,"r")
- if f then
- text = string.gsub(f:read("*all"),"\n$","")
- f:close()
- else
- print("unable to auto load template file " .. text)
- text = nil
- end
- end
- if not text or text == "" then
- text = props["ctx.template." .. action .. ".file"]
- if not text or text == "" then
- text = props["ctx.template." .. action .. ".data"]
- if not text or text == "" then
- text = props["ctx.template." .. action]
- end
- else
- local f = io.open(text,"r")
- if f then
- text = string.gsub(f:read("*all"),"\n$","")
- f:close()
- else
- print("unable to load template file " .. text)
- text = nil
- end
- end
- end
- if text then
- text = string.replace(text,"\\n","\n")
- local pos = string.find(text,"%?")
- text = string.replace(text,"%?","")
- editor:insert(editor.CurrentPos,text)
- if pos then
- editor.CurrentPos = editor.CurrentPos + pos - 1
- editor.SelectionStart = editor.CurrentPos
- editor.SelectionEnd = editor.CurrentPos
- editor:GotoPos(editor.CurrentPos)
- end
- end
-end
-
-menufunctions[13] = process_template_one
-menufunctions[14] = process_template_two
-
--- command.name.26.*=Open Logfile
--- command.subsystem.26.*=3
--- command.26.*=open_log
--- command.save.before.26.*=2
--- command.groupundo.26.*=yes
--- command.shortcut.26.*=Ctrl+E
-
-function open_log()
- scite.Open(props['FileName'] .. ".log")
-end
diff --git a/scripts/context/ruby/base/kpse.rb b/scripts/context/ruby/base/kpse.rb
index a4babae55..0e185b5b8 100644
--- a/scripts/context/ruby/base/kpse.rb
+++ b/scripts/context/ruby/base/kpse.rb
@@ -64,8 +64,12 @@ module Kpse
# @@distribution = 'miktex' if ENV['PATH'] =~ /miktex[\\\/]bin/o
- if ENV['PATH'] =~ /(.*?)miktex[\\\/]bin/i then
- @@distribution = 'miktex' unless $1 =~ /(texmf\-mswin[\/\\]bin|bin[\/\\]win32)/i
+ # if ENV['PATH'] =~ /(.*?)miktex[\\\/]bin/i then
+ # @@distribution = 'miktex' unless $1 =~ /(texmf\-mswin[\/\\]bin|bin[\/\\]win32)/i
+ # end
+
+ if @@mswindows && (ENV['PATH'] =~ /(.*?)miktex[\\\/]bin/i) then
+ @@distribution = 'miktex' unless $1 =~ /(texmf\-mswin[\/\\]bin|bin[\/\\]win32)/i
end
@@re_true = /yes|on|true|1/i
diff --git a/scripts/context/ruby/base/tex.rb b/scripts/context/ruby/base/tex.rb
index 73b382af9..54d5bc730 100644
--- a/scripts/context/ruby/base/tex.rb
+++ b/scripts/context/ruby/base/tex.rb
@@ -90,25 +90,7 @@ class TEX
@@luafiles = "luafiles.tmp"
@@luatarget = "lua/context"
- # we now drop pdfetex definitely
-
- # ENV['PATH'].split(File::PATH_SEPARATOR).each do |p|
- # if System.unix? then
- # pp, pe = "#{p}/pdftex" , "#{p}/pdfetex"
- # else
- # pp, pe = "#{p}/pdftex.exe", "#{p}/pdfetex.exe"
- # end
- # if FileTest.file?(pe) then # we assume no update
- # @@pdftex = 'pdfetex'
- # break
- # elsif FileTest.file?(pp) then # we assume an update
- # @@pdftex = 'pdftex'
- # break
- # end
- # end
-
- # ['etex','pdfetex','standard'] .each do |e| @@texengines[e] = @@pdftex end
- # ['tex','pdftex'] .each do |e| @@texengines[e] = 'pdftex' end
+ @@platformslash = if System.unix? then "\\\\" else "\\" end
['tex','etex','pdftex','pdfetex','standard'] .each do |e| @@texengines[e] = 'pdftex' end
['aleph','omega'] .each do |e| @@texengines[e] = 'aleph' end
@@ -120,6 +102,7 @@ class TEX
['pdfetex','pdftex','pdf','pdftex','standard'] .each do |b| @@backends[b] = 'pdftex' end
['dvipdfmx','dvipdfm','dpx','dpm'] .each do |b| @@backends[b] = 'dvipdfmx' end
['xetex','xtx'] .each do |b| @@backends[b] = 'xetex' end
+ ['aleph'] .each do |b| @@backends[b] = 'dvipdfmx' end
['dvips','ps','dvi'] .each do |b| @@backends[b] = 'dvips' end
['dvipsone'] .each do |b| @@backends[b] = 'dvipsone' end
['acrobat','adobe','distiller'] .each do |b| @@backends[b] = 'acrobat' end
@@ -164,11 +147,11 @@ class TEX
['plain','default','standard'] .each do |f| @@mpsmethods[f] = 'plain' end
['metafun'] .each do |f| @@mpsmethods[f] = 'metafun' end
- @@texmakestr['plain'] = "\\dump"
- @@mpsmakestr['plain'] = "\\dump"
+ @@texmakestr['plain'] = @@platformslash + "dump"
+ @@mpsmakestr['plain'] = @@platformslash + "dump"
['cont-en','cont-nl','cont-de','cont-it',
- 'cont-fr','cont-cz','cont-ro','cont-uk'] .each do |f| @@texprocstr[f] = "\\emergencyend" end
+ 'cont-fr','cont-cz','cont-ro','cont-uk'] .each do |f| @@texprocstr[f] = @@platformslash + "emergencyend" end
@@runoptions['aleph'] = ['--8bit']
@@runoptions['luatex'] = ['--file-line-error']
@@ -1885,7 +1868,7 @@ end
if globalfile || FileTest.file?(rawname) then
- if not dummyfile and not globalfile then
+ if not dummyfile and not globalfile and not forcexml then
scantexpreamble(rawname)
scantexcontent(rawname) if getvariable('texformats').standard?
end
diff --git a/scripts/context/ruby/base/texutil.rb b/scripts/context/ruby/base/texutil.rb
index 9c43f00e9..4882404d5 100644
--- a/scripts/context/ruby/base/texutil.rb
+++ b/scripts/context/ruby/base/texutil.rb
@@ -706,8 +706,8 @@ class TeXUtil
elsif alpha == @@specialsymbol then
character = @@specialbanner
elsif alpha.length > 1 then
- # character = "\\getvalue\{#{alpha}\}%"
- character = "\\#{alpha}%"
+ # character = "\\getvalue\{#{alpha}\}"
+ character = "\\#{alpha}"
else
character = "\\unknown"
end
diff --git a/scripts/context/ruby/graphics/gs.rb b/scripts/context/ruby/graphics/gs.rb
index a73400ba2..cb3d016f4 100644
--- a/scripts/context/ruby/graphics/gs.rb
+++ b/scripts/context/ruby/graphics/gs.rb
@@ -296,9 +296,9 @@ class GhostScript
def gscolorswitch
case getvariable('colormodel')
- when 'cmyk' then '-dProcessColorModel=/DeviceCMYK '
- when 'rgb' then '-dProcessColorModel=/DeviceRGB '
- when 'gray' then '-dProcessColorModel=/DeviceGRAY '
+ when 'cmyk' then '-dProcessColorModel=/DeviceCMYK -dColorConversionStrategy=/CMYK '
+ when 'rgb' then '-dProcessColorModel=/DeviceRGB -dColorConversionStrategy=/RGB '
+        when 'gray' then '-dProcessColorModel=/DeviceGray -dColorConversionStrategy=/Gray '
else
''
end
diff --git a/scripts/context/ruby/texexec.rb b/scripts/context/ruby/texexec.rb
index 3ba3388f0..d2c722438 100644
--- a/scripts/context/ruby/texexec.rb
+++ b/scripts/context/ruby/texexec.rb
@@ -111,7 +111,7 @@ class Commands
if job = TEX.new(logger) then
prepare(job)
job.cleanuptemprunfiles
- files = @commandline.arguments.sort
+ files = if @commandline.option('sort') then @commandline.arguments.sort else @commandline.arguments end
if files.length > 0 then
if f = File.open(job.tempfilename('tex'),'w') then
backspace = @commandline.checkedoption('backspace', '1.5cm')
@@ -156,7 +156,7 @@ class Commands
prepare(job)
job.cleanuptemprunfiles
fast = @commandline.option('fast')
- files = @commandline.arguments.sort
+ files = if @commandline.option('sort') then @commandline.arguments.sort else @commandline.arguments end
if fast or (files.length > 0) then
if f = File.open(job.tempfilename('tex'),'w') then
files.delete("texexec.pdf")
@@ -202,7 +202,7 @@ class Commands
if job = TEX.new(logger) then
prepare(job)
job.cleanuptemprunfiles
- files = @commandline.arguments.sort
+ files = if @commandline.option('sort') then @commandline.arguments.sort else @commandline.arguments end
msuffixes = ['tex','mkii','mkiv','mp','pl','pm','rb']
if files.length > 0 then
files.each do |fname|
@@ -302,7 +302,7 @@ class Commands
if job = TEX.new(logger) then
prepare(job)
job.cleanuptemprunfiles
- files = @commandline.arguments.sort
+ files = if @commandline.option('sort') then @commandline.arguments.sort else @commandline.arguments end
if files.length > 0 then
if f = File.open(job.tempfilename('tex'),'w') then
emptypages = @commandline.checkedoption('addempty', '')
@@ -355,7 +355,7 @@ class Commands
if job = TEX.new(logger) then
prepare(job)
job.cleanuptemprunfiles
- files = @commandline.arguments.sort
+ files = if @commandline.option('sort') then @commandline.arguments.sort else @commandline.arguments end
if files.length > 0 then
if f = File.open(job.tempfilename('tex'),'w') then
selection = @commandline.checkedoption('selection', '')
@@ -425,7 +425,7 @@ class Commands
if job = TEX.new(logger) then
prepare(job)
job.cleanuptemprunfiles
- files = @commandline.arguments.sort
+ files = if @commandline.option('sort') then @commandline.arguments.sort else @commandline.arguments end
if files.length > 0 then
if f = File.open(job.tempfilename('tex'),'w') then
scale = @commandline.checkedoption('scale')
@@ -492,7 +492,7 @@ class Commands
if job = TEX.new(logger) then
prepare(job)
job.cleanuptemprunfiles
- files = @commandline.arguments.sort
+ files = if @commandline.option('sort') then @commandline.arguments.sort else @commandline.arguments end
if files.length > 0 then
if f = File.open(job.tempfilename('tex'),'w') then
paperoffset = @commandline.checkedoption('paperoffset', '0cm')
@@ -762,6 +762,7 @@ commandline.registerflag('aleph')
commandline.registerflag('all')
commandline.registerflag('fast')
+commandline.registerflag('sort')
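+# for illustration only: the given file order is now kept unless --sort is passed,
+# e.g. texexec --pdfarrange --sort a.pdf b.pdf c.pdf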
# generic
diff --git a/scripts/context/ruby/www/exa.rb b/scripts/context/ruby/www/exa.rb
index 997eab67d..20a40fc7b 100644
--- a/scripts/context/ruby/www/exa.rb
+++ b/scripts/context/ruby/www/exa.rb
@@ -368,6 +368,7 @@ class WWW
end
def handle_exastatus
+ get_cfg() # weird, needed for apache, but not for wwwserver
if request_variable('id').empty? then
if id = valid_session() then
send_result()
diff --git a/scripts/context/ruby/www/lib.rb b/scripts/context/ruby/www/lib.rb
index f5f362b12..b9a44c9f6 100644
--- a/scripts/context/ruby/www/lib.rb
+++ b/scripts/context/ruby/www/lib.rb
@@ -163,7 +163,7 @@ class WWW
@interface.set('template:login' , 'exalogin.htm')
@interface.set('process:timeout' , @@session_max_age)
@interface.set('process:threshold' , @@send_threshold)
- @interface.set('process:background', 'yes') # this demands a watchdog being active
+ @interface.set('process:background', 'yes') # this demands a watchdog being active
@interface.set('process:indirect' , 'no') # indirect download, no direct feed
@interface.set('process:autologin' , 'yes') # provide default interface when applicable
@interface.set('process:exaurl' , '') # this one will be used as replacement in templates
@@ -1226,6 +1226,12 @@ class WWW
return ! (@session.nothing?('gui') && @session.nothing?('path') && @session.nothing?('process'))
end
+ def get_cfg()
+ if data = load_interface_file() then
+ fetch_session_interface_variables(data)
+ end
+ end
+
end
class WWW
diff --git a/tex/context/base/attr-ini.lua b/tex/context/base/attr-ini.lua
index 3a5ca3933..b8cf7e92c 100644
--- a/tex/context/base/attr-ini.lua
+++ b/tex/context/base/attr-ini.lua
@@ -6,11 +6,31 @@ if not modules then modules = { } end modules ['attr-ini'] = {
license = "see context related readme files"
}
+-- nb: attributes: color etc is much slower than normal (marks + literals) but ...
+
+--
+-- nodes
+--
+
+nodes = nodes or { }
+
--
-- attributes
--
-nodes = nodes or { }
+attributes = attributes or { }
+
+attributes.names = attributes.names or { }
+attributes.numbers = attributes.numbers or { }
+attributes.list = attributes.list or { }
+
+input.storage.register(false,"attributes/names", attributes.names, "attributes.names")
+input.storage.register(false,"attributes/numbers", attributes.numbers, "attributes.numbers")
+input.storage.register(false,"attributes/list", attributes.list, "attributes.list")
+
+function attributes.define(name,number)
+ attributes.numbers[name], attributes.names[number], attributes.list[number] = number, name, { }
+end
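+
+-- purely illustrative (the real numbers are allocated at the tex end): define keeps
+-- the three way mapping between a name, its number and an (initially empty) list
+--
+--~ attributes.define("state",100) -- hypothetical name/number pair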
-- We can distinguish between rules and glyphs but it's not worth the trouble. A
-- first implementation did that and while it saves a bit for glyphs and rules, it
@@ -23,7 +43,8 @@ nodes = nodes or { }
function totokens(str)
local t = { }
- for c in string.bytes(str) do
+--~ for c in string.bytes(str) do
+ for c in str:bytes() do
t[#t+1] = { 12, c }
end
return t
@@ -35,11 +56,16 @@ backends = backends or { }
backends.pdf = backends.pdf or { }
backend = backend or backends.pdf
-function backends.pdf.literal(str)
- local t = node.new('whatsit',8)
- t.mode = 1 -- direct
- t.data = str -- totokens(str)
- return t
+do
+
+ local pdfliteral, register = nodes.pdfliteral, nodes.register
+
+ function backends.pdf.literal(str)
+ local t = pdfliteral(str)
+ register(t)
+ return t
+ end
+
end
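+
+-- sketch of the intended effect (not part of the patch): literals now come from the
+-- node helpers and are registered so that they can be freed later; usage is unchanged
+--
+--~ local n = backends.pdf.literal("0 g")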
-- shipouts
@@ -49,25 +75,29 @@ shipouts.plugins = shipouts.plugins or { }
do
+ local pairs = pairs -- in theory faster
+
local hlist, vlist = node.id('hlist'), node.id('vlist')
- local contains = node.has_attribute
+ local has_attribute = node.has_attribute
- nodes.trigger = false
- nodes.triggering = false
+ nodes.trigger = nodes.trigger or false
+ nodes.triggering = nodes.triggering or false
-- we used to do the main processor loop here and call processor for each node
     -- but eventually this was too much of a slowdown (1 sec on 23 for 120 pages mk)
- -- so that we moved looping to teh processor itself; this may lead to a bit of
+ -- so that we moved looping to the processor itself; this may lead to a bit of
     -- duplicate code once we have more state handlers
- function nodes.process_attributes(head,plugins)
+ local starttiming, stoptiming = input.starttiming, input.stoptiming
+
+ local function process_attributes(head,plugins)
if head then -- is already tested
- input.start_timing(attributes)
- local trigger = nodes.trigger
+ starttiming(attributes)
local done, used = false, { }
+ local trigger, numbers = nodes.trigger, attributes.numbers
for name, plugin in pairs(plugins) do
- local attribute = attributes.numbers[name]
+ local attribute = numbers[name]
if attribute then
local namespace = plugin.namespace
if namespace.enabled then
@@ -80,12 +110,12 @@ do
end
if processor then
local inheritance = (resolver and resolver()) or -1
- local ok
+ local ok -- = false
head, ok = processor(namespace,attribute,head,inheritance)
done = done or ok
end
if finalizer then -- no need when not ok
- local ok
+ local ok -- = false
head, ok, used[attribute] = finalizer(namespace,attribute,head)
done = done or ok
end
@@ -96,47 +126,80 @@ do
end
if done then
for name, plugin in pairs(plugins) do
- local attribute = attributes.numbers[name]
+ local attribute = numbers[name]
if used[attribute] then
local namespace = plugin.namespace
if namespace.enabled then
local flusher = plugin.flusher
if flusher then
local h, d = flusher(namespace,attribute,head,used[attribute])
+ head = h
end
end
end
end
end
- input.stop_timing(attributes)
+ stoptiming(attributes)
return head, done
else
return head, false
end
end
- function nodes.process_page(head)
- return nodes.process_attributes(head,shipouts.plugins)
+ nodes.process_attributes = process_attributes
+
+ --~ glyph = 746876
+ --~ glue = 376096
+ --~ hlist = 152284
+ --~ disc = 47224
+ --~ kern = 41504
+ --~ penalty = 31964
+ --~ whatsit = 29048
+ --~ vlist = 20136
+ --~ rule = 13292
+ --~ mark = 4304
+ --~ math = 1072
+
+ local disc, mark, free = node.id('disc'), node.id('mark'), node.free
+
+ local function cleanup_page(head) -- rough
+ local prev, start = nil, head
+ while start do
+ local id, nx = start.id, start.next
+ if id == disc or id == mark then
+ if prev then
+ prev.next = nx
+ end
+ if start == head then
+ head = nx
+ end
+ local tmp = start
+ start = nx
+ free(tmp)
+ elseif id == hlist or id == vlist then
+ local sl = start.list
+ if sl then
+ start.list = cleanup_page(sl)
+ end
+ prev, start = start, nx
+ else
+ prev, start = start, nx
+ end
+ end
+ return head
end
-end
+ nodes.cleanup_page = cleanup_page
---
--- attributes
---
+ nodes.cleanup_page_first = false
-attributes = attributes or { }
-
-attributes.names = attributes.names or { }
-attributes.numbers = attributes.numbers or { }
-attributes.list = attributes.list or { }
-
-input.storage.register(false,"attributes/names", attributes.names, "attributes.names")
-input.storage.register(false,"attributes/numbers", attributes.numbers, "attributes.numbers")
-input.storage.register(false,"attributes/list", attributes.list, "attributes.list")
+ function nodes.process_page(head)
+ if nodes.cleanup_page_first then
+ head = cleanup_page(head)
+ end
+ return process_attributes(head,shipouts.plugins)
+ end
-function attributes.define(name,number)
- attributes.numbers[name], attributes.names[number], attributes.list[number] = number, name, { }
end
--
@@ -149,15 +212,15 @@ do
local glyph, rule, whatsit, hlist, vlist = node.id('glyph'), node.id('rule'), node.id('whatsit'), node.id('hlist'), node.id('vlist')
+ local has_attribute, copy = node.has_attribute, node.copy
+
local current, used, done = 0, { }, false
function states.initialize(what, attribute, stack)
current, used, done = 0, { }, false
end
- local contains, copy = node.has_attribute, node.copy
-
- local function insert(n,stack,previous,head)
+ local function insert(n,stack,previous,head) -- there is a helper, we need previous because we are not slided
if n then
n = copy(n)
n.next = stack
@@ -168,71 +231,75 @@ do
end
previous = n
end
- return stack, previous, head
+ return stack, head
end
- function states.finalize(namespace,attribute,head)
- if current > 0 and namespace.none then
- if head.id == hlist or head.id == vlist then
- local stack, previous, head = insert(namespace.none,head.list,nil,head.list)
- else
- local stack, previous, head = insert(namespace.none,head,nil,head)
+ function states.finalize(namespace,attribute,head) -- is this one ok?
+ if current > 0 then
+ local nn = namespace.none
+ if nn then
+ local id = head.id
+ if id == hlist or id == vlist then
+ local list = head.list
+ if list then
+ local _, h = insert(nn,list,nil,list)
+ head.list = h
+ end
+ else
+ stack, head = insert(nn,head,nil,head)
+ end
+ return head, true, true
end
- return head, true, true
- else
- return head, false, false
end
+ return head, false, false
end
- function states.process(namespace,attribute,head,inheritance,default) -- one attribute
-local contains = node.has_attribute
-local glyph, rule, whatsit, hlist, vlist = node.id('glyph'), node.id('rule'), node.id('whatsit'), node.id('hlist'), node.id('vlist')
+ local function process(namespace,attribute,head,inheritance,default) -- one attribute
local trigger = namespace.triggering and nodes.triggering and nodes.trigger
---~ local trigger = nodes.triggering and nodes.trigger
- local stack, previous, done, process = head, nil, false, states.process
+ local stack, previous, done = head, nil, false
local nsdata, nsreviver, nsnone = namespace.data, namespace.reviver, namespace.none
while stack do
local id = stack.id
- if id == hlist or id == vlist then
- local content = stack.list
- if content then
- local ok = false
- if trigger and contains(stack,trigger) then
- local outer = contains(stack,attribute)
- if outer ~= inheritance then
- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
- else
- stack.list, ok = process(namespace,attribute,content,inheritance,default)
- end
- else
- stack.list, ok = process(namespace,attribute,content,inheritance,default)
- end
- done = done or ok
- end
- elseif id == glyph or id == rule or id == whatsit then -- special
- local c = contains(stack,attribute)
+ if id == glyph or id == whatsit or id == rule then -- or disc
+ local c = has_attribute(stack,attribute)
if c then
if default and c == inheritance then
if current ~= default then
local data = nsdata[default] or nsreviver(default)
- stack, previous, head = insert(data,stack,previous,head)
+ stack, head = insert(data,stack,previous,head)
current, done, used[default] = default, true, true
end
elseif current ~= c then
local data = nsdata[c] or nsreviver(c)
- stack, previous, head = insert(data,stack,previous,head)
+ stack, head = insert(data,stack,previous,head)
current, done, used[c] = c, true, true
end
elseif default and inheritance then
if current ~= default then
local data = nsdata[default] or nsreviver(default)
- stack, previous, head = insert(data,stack,previous,head)
+ stack, head = insert(data,stack,previous,head)
current, done, used[default] = default, true, true
end
elseif current > 0 then
- stack, previous, head = insert(nsnone,stack,previous,head)
+ stack, head = insert(nsnone,stack,previous,head)
current, done, used[0] = 0, true, true
end
+ elseif id == hlist or id == vlist then
+ local content = stack.list
+ if content then
+ local ok = false
+ if trigger and has_attribute(stack,trigger) then
+ local outer = has_attribute(stack,attribute)
+ if outer ~= inheritance then
+ stack.list, ok = process(namespace,attribute,content,inheritance,outer)
+ else
+ stack.list, ok = process(namespace,attribute,content,inheritance,default)
+ end
+ else
+ stack.list, ok = process(namespace,attribute,content,inheritance,default)
+ end
+ done = done or ok
+ end
end
previous = stack
stack = stack.next
@@ -240,59 +307,58 @@ local glyph, rule, whatsit, hlist, vlist = node.id('glyph'), node.id('rule'), no
return head, done
end
+ states.process = process
+
-- we can force a selector, e.g. document wide color spaces, saves a little
- function states.selective(namespace,attribute,head,inheritance,default) -- two attributes
-local contains = node.has_attribute
-local glyph, rule, whatsit, hlist, vlist = node.id('glyph'), node.id('rule'), node.id('whatsit'), node.id('hlist'), node.id('vlist')
+ local function selective(namespace,attribute,head,inheritance,default) -- two attributes
local trigger = namespace.triggering and nodes.triggering and nodes.trigger
---~ local trigger = nodes.triggering and nodes.trigger
- local stack, previous, done, selective = head, nil, false, states.selective
+ local stack, previous, done = head, nil, false
local nsselector, nsforced, nsselector = namespace.default, namespace.forced, namespace.selector
local nsdata, nsreviver, nsnone = namespace.data, namespace.reviver, namespace.none
while stack do
local id = stack.id
- if id == hlist or id == vlist then
- local content = stack.list
- if content then
- local ok = false
- if trigger and contains(stack,trigger) then
- local outer = contains(stack,attribute)
- if outer ~= inheritance then
- stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
- else
- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
- end
- else
- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
- end
- done = done or ok
- end
- elseif id == glyph or id == rule or id == whatsit then -- special
+ if id == glyph or id == whatsit or id == rule then -- or disc
-- todo: maybe track two states, also selector
- local c = contains(stack,attribute)
+ local c = has_attribute(stack,attribute)
if c then
if default and c == inheritance then
if current ~= default then
local data = nsdata[default] or nsreviver(default)
- stack, previous, head = insert(data[nsforced or contains(stack,nsselector) or nsselector],stack,previous,head)
+ stack, head = insert(data[nsforced or has_attribute(stack,nsselector) or nsselector],stack,previous,head)
current, done, used[default] = default, true, true
end
elseif current ~= c then
local data = nsdata[c] or nsreviver(c)
- stack, previous, head = insert(data[nsforced or contains(stack,nsselector) or nsselector],stack,previous,head)
+ stack, head = insert(data[nsforced or has_attribute(stack,nsselector) or nsselector],stack,previous,head)
current, done, used[c] = c, true, true
end
elseif default and inheritance then
if current ~= default then
local data = nsdata[default] or nsreviver(default)
- stack, previous, head = insert(data[nsforced or contains(stack,nsselector) or nsselector],stack,previous,head)
+ stack, head = insert(data[nsforced or has_attribute(stack,nsselector) or nsselector],stack,previous,head)
current, done, used[default] = default, true, true
end
elseif current > 0 then
- stack, previous, head = insert(nsnone,stack,previous,head)
+ stack, head = insert(nsnone,stack,previous,head)
current, done, used[0] = 0, true, true
end
+ elseif id == hlist or id == vlist then
+ local content = stack.list
+ if content then
+ local ok = false
+ if trigger and has_attribute(stack,trigger) then
+ local outer = has_attribute(stack,attribute)
+ if outer ~= inheritance then
+ stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
+ else
+ stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ end
+ else
+ stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ end
+ done = done or ok
+ end
end
previous = stack
stack = stack.next
@@ -300,6 +366,8 @@ local glyph, rule, whatsit, hlist, vlist = node.id('glyph'), node.id('rule'), no
return head, done
end
+ states.selective = selective
+
end
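Stripped of nesting, inheritance, and triggering, the pattern that both states.process and states.selective implement is: walk the list, and whenever the attribute value on a node differs from the current state, splice a copied literal node in front of it. A reduced sketch of just that idea; simple_process and its arguments are illustrative, not part of the patch:

-- sketch: minimal state-change insertion, ignoring nested lists and inheritance;
-- nsdata maps attribute values to ready-made literal nodes, nsnone (assumed to exist)
-- is the literal that resets the state
local has_attribute, copy = node.has_attribute, node.copy

local function simple_process(attribute, head, nsdata, nsnone)
    local current, prev, n = 0, nil, head
    while n do
        local c = has_attribute(n,attribute) or 0
        if c ~= current then
            local new = copy((c > 0 and nsdata[c]) or nsnone)
            new.next = n                -- splice the state-switching literal before this node
            if prev then prev.next = new else head = new end
            current = c
        end
        prev, n = n, n.next
    end
    return head
end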
states = states or { }
@@ -308,14 +376,21 @@ states.collected = states.collected or { }
input.storage.register(false,"states/collected", states.collected, "states.collected")
function states.collect(str)
- states.collected[#states.collected+1] = str
+ local collected = states.collected
+ collected[#collected+1] = str
end
function states.flush()
- for _, c in ipairs(states.collected) do
- tex.sprint(tex.ctxcatcodes,c)
+--~ for _, c in ipairs(states.collected) do
+--~ tex.sprint(tex.ctxcatcodes,c)
+--~ end
+ local collected = states.collected
+ if #collected > 0 then
+ for i=1,#collected do
+ tex.sprint(tex.ctxcatcodes,collected[i]) -- we're in context mode anyway
+ end
+ states.collected = { }
end
- states.collected = { }
end
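In use the collector is just a deferred print queue; a small sketch (the token strings are placeholders):

-- sketch: queue some to-be-typeset material and flush it later with ctxcatcodes
states.collect("\\dosetupsomestate")    -- hypothetical macro call, stored as a string
states.collect("\\dosetupanotherstate")
states.flush()                          -- sprints the queued strings and empties the list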
function states.check()
@@ -516,8 +591,8 @@ shipouts.plugins.color = {
namespace = colors,
triggering = true,
initializer = states.initialize,
- finalizer = states.finalize ,
- processor = states.selective ,
+ finalizer = states.finalize,
+ processor = states.selective,
resolver = function(...) return colors.main end,
}
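Given the fields that process_attributes consults (namespace, initializer, finalizer, processor, flusher, resolver), a minimal extra plugin would be registered along the same lines; everything named "demo" below is made up for illustration:

-- sketch: skeleton of an attribute plugin, wired like the color plugin above
demo = demo or { }
demo.data    = { }     -- would map attribute values to ready-made literal nodes
demo.none    = nil     -- node that resets the state (left out in this stub)
demo.enabled = false   -- plugins now start disabled and are switched on at first use

shipouts.plugins.demo = {
    namespace   = demo,
    triggering  = false,
    initializer = states.initialize,
    finalizer   = states.finalize,
    processor   = states.process,  -- one-attribute variant; states.selective takes two
    -- no resolver, so process_attributes falls back to an inheritance value of -1
}
-- note: the plugin only kicks in once an attribute called "demo" exists, i.e. after
-- attributes.define("demo",n) / \defineattribute[demo] on the TeX end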
@@ -530,7 +605,7 @@ transparencies = transparencies or { }
transparencies.registered = transparencies.registered or { }
transparencies.data = transparencies.data or { }
transparencies.values = transparencies.values or { }
-transparencies.enabled = true
+transparencies.enabled = false
transparencies.template = "%s:%s"
input.storage.register(false, "transparencies/registered", transparencies.registered, "transparencies.registered")
@@ -591,7 +666,7 @@ shipouts.plugins.transparency = {
overprints = overprints or { }
overprints.data = overprints.data or { }
-overprints.enabled = true
+overprints.enabled = false
overprints.data[1] = backends.pdf.literal(string.format("/GSoverprint gs"))
overprints.data[2] = backends.pdf.literal(string.format("/GSknockout gs"))
@@ -619,7 +694,7 @@ shipouts.plugins.overprint = {
negatives = negatives or { }
negatives.data = negatives.data or { }
-negatives.enabled = true
+negatives.enabled = false
negatives.data[1] = backends.pdf.literal(string.format("/GSpositive gs"))
negatives.data[2] = backends.pdf.literal(string.format("/GSnegative gs"))
@@ -648,7 +723,7 @@ shipouts.plugins.negative = {
effects = effects or { }
effects.data = effects.data or { }
effects.registered = effects.registered or { }
-effects.enabled = true
+effects.enabled = false
effects.stamp = "%s:%s:%s"
input.storage.register(false, "effects/registered", effects.registered, "effects.registered")
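All of these .enabled flags now start out false and are flipped to true from the TeX end the first time the corresponding feature is actually used (see the attr-ini.tex changes below). On the Lua side that lazy handshake could be wrapped like this; lazyregister is purely illustrative:

-- sketch: enable the effects plugin only when an effect is really registered
local function lazyregister(...)
    effects.enabled = true        -- from now on the shipout pass processes effect attributes
    return effects.register(...)  -- signature unchanged, we just forward the arguments
end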
diff --git a/tex/context/base/attr-ini.tex b/tex/context/base/attr-ini.tex
index a73e15cf7..ae7cf019e 100644
--- a/tex/context/base/attr-ini.tex
+++ b/tex/context/base/attr-ini.tex
@@ -42,6 +42,8 @@
\appendetoks\csname @attr@#1\endcsname\minusone\to\attributesresetlist
\ctxlua{attributes.define("#1",\number\attdefcounter)}}
+% expandable so we can \edef them for speed
+
\def\dosetattribute#1#2{\csname @attr@#1\endcsname#2\relax}
\def\doresetattribute#1{\csname @attr@#1\endcsname\minusone}
\def\dogetattribute #1{\number\csname @attr@#1\endcsname}
@@ -50,9 +52,9 @@
\let\dompattribute\gobbletwoarguments
\defineattribute[mark]
-\defineattribute[status]
+% \defineattribute[status] % used ? maybe combine with
+\defineattribute[state]
\defineattribute[trigger] % feature inheritance
-% \defineattribute[language]
\defineattribute[skip]
\defineattribute[penalty]
@@ -60,6 +62,15 @@
nodes.trigger = \dogetattributeid{trigger}
\stopruntimectxluacode
+% \defineattribute[ignore]
+%
+% \edef\startignorecontent{\dosetattribute{ignore}\plusone}
+% \edef\stopignorecontent {\doresetattribute{ignore}}
+%
+% \startruntimectxluacode
+% nodes.ignore = \dogetattributeid{ignore}
+% \stopruntimectxluacode
+
% \dosetattribute{status}{1}
% temp here / will be indirect ! just for testing
@@ -93,11 +104,16 @@
\defineattribute[transparency]
-\def\registertransparency#1#2#3% we need to fake a name in the current setup, same as color
+\def\registertransparency#1#2#3%
{\setevalue{(ts:#1)}{\dosetattribute{transparency}{\ctxlua{tex.print(transparencies.register(#2,#3))}}}}
\def\sometransparencyswitch#1{\csname(ts:#1)\endcsname}
+\def\sometransparencyswitch
+ {\ctxlua{transparencies.enabled=true}%
+ \gdef\sometransparencyswitch##1{\csname(ts:##1)\endcsname}%
+ \sometransparencyswitch}
+
% \registertransparency {one} {1} {.5}
% \registertransparency {two} {1} {.6}
@@ -109,8 +125,10 @@
{\initializePDFoverprint % temp here, to be tested in la code (states.collect)
\setvalue{(os:#1)}{\dosetattribute{overprint}{\ctxlua{tex.print(overprints.register('#2'))}}}}
-\def\dotriggeroverprint#1%
- {\csname(os:#1)\endcsname}
+\def\dotriggeroverprint
+ {\ctxlua{overprints.enabled=true}%
+ \gdef\dotriggeroverprint##1{\csname(os:##1)\endcsname}%
+ \dotriggeroverprint}
% \registeroverprint{knockout} {knockout}
% \registeroverprint{overprint}{overprint}
@@ -123,8 +141,10 @@
{\initializePDFnegative % temp here, to be tested in la code (states.collect)
\setvalue{(ns:#1)}{\dosetattribute{negative}{\ctxlua{tex.print(negatives.register('#2'))}}}}
-\def\dotriggernegative#1%
- {\csname(ns:#1)\endcsname}
+\def\dotriggernegative
+ {\ctxlua{negatives.enabled=true}%
+ \gdef\dotriggernegative##1{\csname(ns:##1)\endcsname}%
+ \dotriggernegative}
% \registernegative{positive}{positive}
% \registernegative{negative}{negative}
@@ -137,9 +157,16 @@
{\setxvalue{(es:#1:#2:\number\dimexpr#3\relax)}%
{\dosetattribute{effect}{\ctxlua{tex.print(effects.register('#1',#2,\number\dimexpr#3\relax))}}}}
-\def\dotriggereffect#1#2#3%
- {\ifcsname(es:#1:#2:\number\dimexpr#3\relax)\endcsname\else\registereffect{#1}{#2}{#3}\fi
- \csname(es:#1:#2:\number\dimexpr#3\relax)\endcsname}
+% \def\dotriggereffect#1#2#3%
+% {\ifcsname(es:#1:#2:\number\dimexpr#3\relax)\endcsname\else\registereffect{#1}{#2}{#3}\fi
+% \csname(es:#1:#2:\number\dimexpr#3\relax)\endcsname}
+
+\def\dotriggereffect
+ {\ctxlua{effects.enabled=true}%
+ \gdef\dotriggereffect##1##2##3%
+ {\ifcsname(es:##1:##2:\number\dimexpr##3\relax)\endcsname\else\registereffect{##1}{##2}{##3}\fi
+ \csname(es:##1:##2:\number\dimexpr##3\relax)\endcsname}%
+ \dotriggereffect}
% \registereffect{normal}
% \registereffect{inner}
@@ -177,7 +204,6 @@
\let\normalshipout\shipout
-
% tricky stuff:
\newcount\attributeboxcount
diff --git a/tex/context/base/char-def.tex b/tex/context/base/char-def.tex
index 9e722ba13..2399c73a3 100644
--- a/tex/context/base/char-def.tex
+++ b/tex/context/base/char-def.tex
@@ -23,6 +23,8 @@
\global\uccode #1=#3 }
\registerctxluafile{char-def}{1.001}
+\registerctxluafile{char-mth}{1.001}
+\registerctxluafile{char-syn}{1.001}
%D The codes are stored in the format, so we don't need to reinitialize
%D them (unless of course we have adapted the table).
diff --git a/tex/context/base/char-ini.lua b/tex/context/base/char-ini.lua
index 95030da86..c0ce04ef6 100644
--- a/tex/context/base/char-ini.lua
+++ b/tex/context/base/char-ini.lua
@@ -19,9 +19,10 @@ from the big character table that we use for all kind of purposes:
<type>char-def.lua</type>.</p>
--ldx]]--
-characters = characters or { }
-characters.data = characters.data or { }
-characters.context = characters.context or { }
+characters = characters or { }
+characters.data = characters.data or { }
+characters.synonyms = characters.synonyms or { }
+characters.context = characters.context or { }
do
local _empty_table_ = { __index = function(t,k) return "" end }
@@ -43,18 +44,22 @@ characters.context.utfcodes = characters.context.utfcodes or { }
characters.context.enccodes = characters.context.enccodes or { }
function characters.context.rehash()
- local unicodes, utfcodes, enccodes = characters.context.unicodes, characters.context.utfcodes, characters.context.enccodes
+ local unicodes, utfcodes, enccodes, utfchar = characters.context.unicodes, characters.context.utfcodes, characters.context.enccodes, utf.char
for k,v in pairs(characters.data) do
local contextname, adobename = v.contextname, v.adobename
if contextname then
- unicodes[contextname] = v.unicodeslot
- utfcodes[contextname] = utf.char(v.unicodeslot)
+ local slot = v.unicodeslot
+ unicodes[contextname] = slot
+ utfcodes[contextname] = utfchar(slot)
end
local encname = adobename or contextname
if encname then
enccodes[encname] = k
end
end
+ for name,code in pairs(characters.synonyms) do
+ if not enccodes[name] then enccodes[name] = code end
+ end
end
--[[ldx--
@@ -89,7 +94,7 @@ use the table. After all, we have this information available anyway.</p>
function characters.context.define()
local unicodes, utfcodes = characters.context.unicodes, characters.context.utfcodes
- local flush, tc = tex.sprint, tex.ctxcatcodes
+ local flush, tc, char = tex.sprint, tex.ctxcatcodes, utf.char
for u, chr in pairs(characters.data) do
local contextname = chr.contextname
if contextname then
@@ -97,7 +102,7 @@ function characters.context.define()
if chr.unicodeslot < 128 then
flush(tc, "\\chardef\\" .. contextname .. "=" .. u) -- unicodes[contextname])
else
- flush(tc, "\\let\\" .. contextname .. "=" .. utf.char(u)) -- utfcodes[contextname])
+ flush(tc, "\\let\\" .. contextname .. "=" .. char(u)) -- utfcodes[contextname])
end
end
end
@@ -118,9 +123,10 @@ function characters.setcodes()
for code, chr in pairs(characters.data) do
local cc = chr.category
if cc == 'll' or cc == 'lu' or cc == 'lt' then
- if not chr.lccode then chr.lccode = code end
- if not chr.uccode then chr.uccode = code end
- flush(tc, '\\setcclcuc '.. code .. ' ' .. chr.lccode .. ' ' .. chr.uccode .. ' ')
+ local lc, uc = chr.lccode, chr.uccode
+ if not lc then chr.lccode, lc = code, code end
+ if not uc then chr.uccode, uc = code, code end
+ flush(tc, '\\setcclcuc '.. code .. ' ' .. lc .. ' ' .. uc .. ' ')
end
end
end
diff --git a/tex/context/base/char-syn.lua b/tex/context/base/char-syn.lua
new file mode 100644
index 000000000..a779e1a58
--- /dev/null
+++ b/tex/context/base/char-syn.lua
@@ -0,0 +1,140 @@
+if not modules then modules = { } end modules ['char-syn'] = {
+ version = 1.001,
+ comment = "companion to char-ini.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- thanks to tex4ht for these mappings
+
+characters.synonyms = {
+ angle = 0x2220,
+ anticlockwise = 0x21BA,
+ arrowaxisleft = 0x2190,
+ arrowaxisright = 0x2192,
+ arrowparrleftright = 0x21C6,
+ arrowparrrightleft = 0x21C4,
+ arrowtailleft = 0x21A2,
+ arrowtailright = 0x21A3,
+ arrowtripleleft = 0x21DA,
+ arrowtripleright = 0x21DB,
+ axisshort = 0x2212,
+ because = 0x2235,
+ between = 0x226C,
+ check = 0x2713,
+ circleasteris = 0x229B,
+ circleequal = 0x2257,
+ circleminus = 0x229D,
+ circleR = 0x24C7,
+ circlering = 0x229A,
+ circleS = 0x24C8,
+ clockwise = 0x21BB,
+ complement = 0x2201,
+ curlyleft = 0x21AB,
+ curlyright = 0x21AC,
+ dblarrowdwn = 0x21CA,
+ dblarrowheadleft = 0x219E,
+ dblarrowheadright = 0x21A0,
+ dblarrowleft = 0x21C7,
+ dblarrowright = 0x21C9,
+ dblarrowup = 0x21C8,
+ defines = 0x225C,
+ diamond = 0x2662,
+ diamondsolid = 0x2666,
+ difference = 0x224F,
+ dotplus = 0x2214,
+ downfall = 0x22CE,
+ equaldotleftright = 0x2252,
+ equaldotrightleft = 0x2253,
+ equalorfollows = 0x22DF,
+ equalorgreater = 0x22DD,
+ equalorless = 0x22DC,
+ equalorprecedes = 0x22DE,
+ equalsdots = 0x2251,
+ followsorcurly = 0x227D,
+ followsorequal = 0x227F,
+ forces = 0x22A9,
+ forcesbar = 0x22AA,
+ fork = 0x22D4,
+ frown = 0x2322,
+ geomequivalent = 0x224E,
+    greaterdbleqlless  = 0x22DA,
+ greaterdblequal = 0x2267,
+ greaterlessequal = 0x22DA,
+ greaterorapproxeql = 0x227F,
+ greaterorequalslant= 0x2265,
+ greaterorless = 0x2277,
+ greaterorsimilar = 0x2273,
+ harpoondownleft = 0x21C3,
+ harpoondownright = 0x21C2,
+ harpoonleftright = 0x21CC,
+ harpoonrightleft = 0x21CB,
+ harpoonupleft = 0x21BF,
+ harpoonupright = 0x21BE,
+ intercal = 0x22BA,
+ intersectiondbl = 0x22D2,
+ lessdbleqlgreater = 0x22DB,
+ lessdblequal = 0x2266,
+ lessequalgreater = 0x22DB,
+ lessorapproxeql = 0x227E,
+ lessorequalslant = 0x2264,
+ lessorgreater = 0x2276,
+ lessorsimilar = 0x2272,
+ maltesecross = 0xFFFD,
+ measuredangle = 0x2221,
+ muchgreater = 0x22D9,
+ muchless = 0x22D8,
+ multimap = 0x22B8,
+ multiopenleft = 0x22CB,
+ multiopenright = 0x22CC,
+ nand = 0x22BC,
+ orunderscore = 0x22BB,
+ perpcorrespond = 0x2259,
+ precedesorcurly = 0x227C,
+ precedesorequal = 0x227E,
+ primereverse = 0x2035,
+ proportional = 0x221D,
+ revasymptequal = 0x2243,
+ revsimilar = 0x223D,
+ rightanglene = 0x231D,
+ rightanglenw = 0x231C,
+ rightanglese = 0x231F,
+ rightanglesw = 0x231E,
+ ringinequal = 0x2256,
+ satisfies = 0x22A8,
+ shiftleft = 0x21B0,
+ shiftright = 0x21B1,
+ smile = 0x2323,
+ sphericalangle = 0x2222,
+ square = 0x25A1,
+ squaredot = 0x22A1,
+ squareimage = 0x228F,
+ squareminus = 0x229F,
+ squaremultiply = 0x22A0,
+ squareoriginal = 0x2290,
+ squareplus = 0x229E,
+ squaresmallsolid = 0x25AA,
+ squaresolid = 0x25A0,
+ squiggleleftright = 0x21AD,
+ squiggleright = 0x21DD,
+ star = 0x22C6,
+ subsetdbl = 0x22D0,
+ subsetdblequal = 0x2286,
+ supersetdbl = 0x22D1,
+ supersetdblequa = 0x2287,
+ therefore = 0x2234,
+ triangle = 0x25B5,
+ triangledownsld = 0x25BE,
+ triangleinv = 0x25BF,
+ triangleleft = 0x25C3,
+ triangleleftequal = 0x22B4,
+ triangleleftsld = 0x25C2,
+ triangleright = 0x25B9,
+ trianglerightequal = 0x22B5,
+ trianglerightsld = 0x25B8,
+ trianglesolid = 0x25B4,
+ uniondbl = 0x22D3,
+ uprise = 0x22CF,
+ Yen = 0x00A5,
+}
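These names are folded into characters.context.enccodes by the rehash change in char-ini.lua above, but only when no adobename or contextname already claims the slot. A quick sanity check could look like this:

-- sketch: after a rehash a synonym resolves to its code point
characters.context.rehash()
local enccodes = characters.context.enccodes
print(string.format("therefore -> U+%04X", enccodes.therefore)) -- 0x2234, unless already taken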
diff --git a/tex/context/base/char-utf.lua b/tex/context/base/char-utf.lua
index 2d11a1794..ae81073bf 100644
--- a/tex/context/base/char-utf.lua
+++ b/tex/context/base/char-utf.lua
@@ -50,17 +50,19 @@ function characters.filters.utf.initialize()
local vs = v.specials
if vs and #vs == 3 and vs[1] == 'char' then
local first, second = uc(vs[2]), uc(vs[3])
- if not cg[first] then
- cg[first] = { }
+ local cgf = cg[first]
+ if not cgf then
+ cgf = { }
+ cg[first] = cgf
end
- cg[first][second] = uc(k)
+ cgf[second] = uc(k)
end
end
characters.filters.utf.initialized = true
end
end
-function characters.filters.utf.collapse(str)
+function characters.filters.utf.collapse(str) -- old one
if characters.filters.utf.collapsing and str and #str > 1 then
if not characters.filters.utf.initialized then -- saves a call
characters.filters.utf.initialize()
@@ -149,36 +151,115 @@ first snippet uses the relocated dollars.</p>
do
local cg = characters.graphemes
- local cr = characters.filters.utf.private.high
-
- function characters.filters.utf.collapse(str)
- if characters.filters.utf.collapsing and str then
+    local cr = characters.filters.utf.private.high -- could be done via an lpeg
+ local cf = characters.filters.utf
+ local su = string.utfcharacters
+
+ local concat = table.concat
+
+ --~ keep this one, it's the baseline
+ --~
+ --~ function characters.filters.utf.collapse(str)
+ --~ if cf.collapsing and str then
+ --~ if #str > 1 then
+ --~ if not cf.initialized then -- saves a call
+ --~ cf.initialize()
+ --~ end
+ --~ local tokens, first, done = { }, false, false
+ --~ for second in su(str) do
+ --~ if cr[second] then
+ --~ if first then
+ --~ tokens[#tokens+1] = first
+ --~ end
+ --~ first, done = cr[second], true
+ --~ else
+ --~ local cgf = cg[first]
+ --~ if cgf and cgf[second] then
+ --~ first, done = cgf[second], true
+ --~ elseif first then
+ --~ tokens[#tokens+1] = first
+ --~ first = second
+ --~ else
+ --~ first = second
+ --~ end
+ --~ end
+ --~ end
+ --~ if done then
+ --~ tokens[#tokens+1] = first
+ --~ return concat(tokens,"") -- seldom called
+ --~ end
+ --~ elseif #str > 0 then
+ --~ return cr[str] or str
+ --~ end
+ --~ end
+ --~ return str
+ --~ end
+
+ --[[ldx--
+    <p>The next variant collects tokens lazily; on a 140 page mk.tex this saves
+    about .25 seconds, which is understandable because the document has no graphemes
+    and not collecting tokens is not only faster but also saves garbage collection.
+ </p>
+ --ldx]]--
+
+ function characters.filters.utf.collapse(str) -- not really tested (we could preallocate a table)
+ if cf.collapsing and str then
if #str > 1 then
- if not characters.filters.utf.initialized then -- saves a call
- characters.filters.utf.initialize()
+ if not cf.initialized then -- saves a call
+ cf.initialize()
end
- local tokens, first, done = { }, false, false
- for second in string.utfcharacters(str) do
- if cr[second] then
- if first then
- tokens[#tokens+1] = first
+ local tokens, first, done, n = { }, false, false, 0
+ for second in su(str) do
+ if done then
+ if cr[second] then
+ if first then
+ tokens[#tokens+1] = first
+ end
+ first = cr[second]
+ else
+ local cgf = cg[first]
+ if cgf and cgf[second] then
+ first = cgf[second]
+ elseif first then
+ tokens[#tokens+1] = first
+ first = second
+ else
+ first = second
+ end
end
- first, done = cr[second], true
else
- local cgf = cg[first]
- if cgf and cgf[second] then
- first, done = cgf[second], true
- elseif first then
- tokens[#tokens+1] = first
- first = second
+ if cr[second] then
+ for s in su(str) do
+ if n == 0 then
+ break
+ else
+ tokens[#tokens+1], n = s, n - 1
+ end
+ end
+ if first then
+ tokens[#tokens+1] = first
+ end
+ first, done = cr[second], true
else
- first = second
+ local cgf = cg[first]
+ if cgf and cgf[second] then
+ for s in su(str) do
+ if n == 0 then
+ break
+ else
+ tokens[#tokens+1], n = s, n -1
+ end
+ end
+ first, done = cgf[second], true
+ else
+ first, n = second, n + 1
+ end
end
end
end
if done then
tokens[#tokens+1] = first
- return table.concat(tokens,"")
+ return concat(tokens,"") -- seldom called
end
elseif #str > 0 then
return cr[str] or str
@@ -187,6 +268,53 @@ do
return str
end
+    --~ not faster (0.1 seconds on a 500 k collapsible file)
+ --~
+ --~ local specials, initials = lpeg.P(false), ""
+ --~ for k,v in pairs(cr) do
+ --~ specials, initials = specials + lpeg.P(k)/v, initials .. k:sub(1,1)
+ --~ end
+ --~ specials = lpeg.Cs(lpeg.P((1-lpeg.S(initials)) + specials)^0)
+ --~ local graphemes = ""
+ --~ for _, v in pairs(cg) do
+ --~ for kk, _ in pairs(v) do
+ --~ graphemes = graphemes .. kk:sub(1,1)
+ --~ end
+ --~ end
+ --~ graphemes = lpeg.P{ lpeg.S(graphemes) + 1 * lpeg.V(1) }
+ --~
+ --~ function characters.filters.utf.collapse(str)
+ --~ if cf.collapsing and str then
+ --~ if #str > 1 then
+ --~ str = specials:match(str)
+ --~ if graphemes:match(str) then
+ --~ if not cf.initialized then -- saves a call
+ --~ cf.initialize()
+ --~ end
+ --~ local tokens, first, done = { }, false, false
+ --~ for second in su(str) do
+ --~ local cgf = cg[first]
+ --~ if cgf and cgf[second] then
+ --~ first, done = cgf[second], true
+ --~ elseif first then
+ --~ tokens[#tokens+1] = first
+ --~ first = second
+ --~ else
+ --~ first = second
+ --~ end
+ --~ end
+ --~ if done then
+ --~ tokens[#tokens+1] = first
+ --~ return table.concat(tokens,"")
+ --~ end
+ --~ end
+ --~ elseif #str > 0 then
+ --~ return cr[str] or str
+ --~ end
+ --~ end
+ --~ return str
+ --~ end
+
end
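A small usage sketch of the collapser; it assumes the grapheme table built from char-def.lua maps e plus U+0301 (combining acute) onto the precomposed character:

-- sketch: collapse a decomposed sequence into its precomposed form
characters.filters.utf.collapsing = true
local decomposed = "caf" .. "e" .. string.char(0xCC,0x81) -- "e" + COMBINING ACUTE ACCENT (U+0301)
local collapsed  = characters.filters.utf.collapse(decomposed)
-- collapsed should now be "café", one utf character shorter than the input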
--[[ldx--
diff --git a/tex/context/base/colo-new.lua b/tex/context/base/colo-new.lua
index 842e9c15a..b009c5e9b 100644
--- a/tex/context/base/colo-new.lua
+++ b/tex/context/base/colo-new.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['colo-ini'] = {
license = "see context related readme files"
}
+-- split_settings -> aux.settings_to_hash
+
-- for the moment this looks messy but we're waiting for a pdf backend interface
--
-- code collected here will move and be adapted
@@ -42,7 +44,7 @@ do
function backends.pdf.registerspotcolorname(name,e)
if e and e ~= "" then
- tex.sprint(tex.ctxcatcodes,string.format(s_template_e,name,e))
+ tex.sprint(tex.ctxcatcodes,string.format(s_template_e,name,e)) -- todo in new backend: e:gsub(" ","#20")
end
end
@@ -149,7 +151,7 @@ do
elseif kind == 4 then
backend.registercmykspotcolor(parent,f,d,p,v[6],v[7],v[8],v[9])
end
- backends.pdf.registerspotcolorname(name,e)
+ backends.pdf.registerspotcolorname(parent,e)
end
end
@@ -473,18 +475,33 @@ end
-- literals needed to inject code in the mp stream, we cannot use attributes there
-- since literals may have qQ's
-function ctx.pdfrgbliteral(model,r,g,b)
- tex.sprint(tex.ctxcatcodes,string.format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'rgb',r,g,b))))
-end
-function ctx.pdfcmykliteral(model,c,m,y,k)
- tex.sprint(tex.ctxcatcodes,string.format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'cmyk',c,m,y,k))))
-end
-function ctx.pdfgrayliteral(model,s)
- tex.sprint(tex.ctxcatcodes,string.format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'gray',s))))
-end
-function ctx.pdfspotliteral(model,n,f,d,p)
- tex.sprint(tex.ctxcatcodes,string.format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'spot',n,f,d,p)))) -- incorrect
-end
-function ctx.pdftransparencyliteral(a,t)
- tex.sprint(tex.ctxcatcodes,string.format("\\pdfliteral{/Tr%s gs}",transparencies.register(nil,a,t)))
+do
+
+ local format, sprint = string.format, tex.sprint
+
+ local intransparency = false
+
+ function ctx.pdfrgbliteral(model,r,g,b)
+ sprint(tex.ctxcatcodes,format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'rgb',r,g,b))))
+ end
+ function ctx.pdfcmykliteral(model,c,m,y,k)
+ sprint(tex.ctxcatcodes,format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'cmyk',c,m,y,k))))
+ end
+ function ctx.pdfgrayliteral(model,s)
+ sprint(tex.ctxcatcodes,format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'gray',s))))
+ end
+ function ctx.pdfspotliteral(model,n,f,d,p)
+ sprint(tex.ctxcatcodes,format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'spot',n,f,d,p)))) -- incorrect
+ end
+ function ctx.pdftransparencyliteral(a,t)
+ intransparency = true
+ sprint(tex.ctxcatcodes,format("\\pdfliteral{/Tr%s gs}",transparencies.register(nil,a,t)))
+ end
+ function ctx.pdffinishtransparency()
+ if intransparency then
+ intransparency = false
+ sprint(tex.ctxcatcodes,"\\pdfliteral{/Tr0 gs}") -- we happen to know this -)
+ end
+ end
+
end
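The intransparency flag only remembers whether a /TrN gs literal went into the MetaPost-to-PDF stream, so that the converter can close it off once at the end; roughly (the loop and the specials table are imaginary):

-- sketch: push transparency literals while converting, reset the state once afterwards
for _, s in ipairs(specials) do              -- 'specials' is a made-up placeholder
    if s.transparency then
        ctx.pdftransparencyliteral(s.a, s.t) -- sets intransparency = true as a side effect
    end
end
ctx.pdffinishtransparency()                  -- emits /Tr0 gs only if anything was emitted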
diff --git a/tex/context/base/colo-new.mkii b/tex/context/base/colo-new.mkii
index 9bef82710..ac8b86715 100644
--- a/tex/context/base/colo-new.mkii
+++ b/tex/context/base/colo-new.mkii
@@ -904,6 +904,12 @@
%D page color. This macro is used in the same way as
%D \type {\color}.
+\def\startregistercolor[#1]%
+ {\permitcolormodefalse\startcolor[#1]\permitcolormodetrue}
+
+\def\stopregistercolor
+ {\permitcolormodefalse\stopcolor\permitcolormodetrue}
+
\def\starttextcolor[#1]%
{\doifsomething{#1}
{\bgroup
diff --git a/tex/context/base/colo-new.mkiv b/tex/context/base/colo-new.mkiv
index e7f8dfd17..38cbd7339 100644
--- a/tex/context/base/colo-new.mkiv
+++ b/tex/context/base/colo-new.mkiv
@@ -106,6 +106,13 @@
\csname(ts:#1)\endcsname
\fi\fi}
+\let\normaldoactivatecolor\doactivatecolor
+
+\def\doactivatecolor
+ {\ctxlua{colors.enabled=true}%
+ \let\doactivatecolor\normaldoactivatecolor
+ \doactivatecolor}
+
\def\deactivatecolor
{\doresetattribute\s!color
\doresetattribute\s!transparency}
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index c97575baa..74d4173a3 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -19,27 +19,66 @@
\enablemode[mkiv]
+% potential new defaults:
+
+% \setbreakpoints[compound]
+
\unprotect
-\appendtoks
- \ctxlua{garbagecollector.update()}%
-\to \everyshipout
+\ifx\clearmarks\undefined
+ \def\clearmarks {\begingroup\afterassignment\doclearmarks\scratchcounter}
+ \def\doclearmarks{\normalmarks\scratchcounter{}\endgroup}
+\fi
+
+\def\resetmark#1% we cannot use \normalmarks#1{}
+ {\global\@EA\chardef\csname\@@mrk\string#1\endcsname\zerocount
+ \@EA\clearmarks\csname\@@prk\string#1\endcsname
+ \global\@EA\let\csname\@@trk\string#1\endcsname\empty
+ \global\@EA\let\csname\@@frk\string#1\endcsname\empty
+ \global\@EA\let\csname\@@brk\string#1\endcsname\empty
+ \global\@EA\let\csname\@@crk\string#1\endcsname\empty}
+
+% \appendtoks
+% \ctxlua{garbagecollector.update()}%
+% \to \everyshipout
% texmf.instance will become just texmf
+%D Since this can be a showstopper, we report the path at the beginning
+%D as well as at the end of a run.
+
+\writestatus\m!lua{used config path - \ctxlua{tex.print(caches.configpath(texmf.instance))}}
+\writestatus\m!lua{used cache path - \ctxlua{tex.print(caches.path)}}
+
+%D For the moment we report some statistics. Later this will become an option,
+%D but for now we need this information.
+
\appendtoks
+ \writestatus\m!lua{used config path - \ctxlua{tex.print(caches.configpath(texmf.instance))}}%
+ \writestatus\m!lua{used cache path - \ctxlua{tex.print(caches.path)}}%
+ \writestatus\m!lua{modules/dumps/instances - \ctxlua{tex.print((status.luabytecodes-500).."/"..input.storage.done.."/"..status.luastates)}}%
\writestatus\m!lua{input load time - \ctxlua{input.loadtime(texmf.instance)} seconds}%
\writestatus\m!lua{fonts load time - \ctxlua{input.loadtime(fonts)} seconds}%
\writestatus\m!lua{xml load time - \ctxlua{input.loadtime(lxml)} seconds}%
\writestatus\m!lua{mps conversion time - \ctxlua{input.loadtime(mptopdf)} seconds}%
- \writestatus\m!lua{node processing time - \ctxlua{input.loadtime(nodes)} seconds}%
+ \writestatus\m!lua{node processing time - \ctxlua{input.loadtime(nodes)} seconds (including kernel)}%
+ \writestatus\m!lua{kernel processing time - \ctxlua{input.loadtime(kernel)} seconds}%
\writestatus\m!lua{attribute processing time - \ctxlua{input.loadtime(attributes)} seconds}%
- \writestatus\m!lua{used config path - \ctxlua{tex.print(caches.configpath(texmf.instance))}}%
- \writestatus\m!lua{used cache path - \ctxlua{tex.print(caches.path)}}%
- \writestatus\m!lua{modules/dumps/instances - \ctxlua{tex.print((status.luabytecodes-500).."/"..input.storage.done.."/"..status.luastates)}}%
- \writestatus\m!lua{current memory usage - \ctxlua{tex.print(status.luastate_bytes)} bytes}%
- \writestatus\m!lua{language load time - \ctxlua{input.loadtime(languages)} seconds (n=\ctxlua{tex.print(languages.n())})}%
+ \writestatus\m!lua{language load time - \ctxlua{input.loadtime(languages)} seconds (n=\ctxlua{tex.print(languages.hyphenation.n())})}%
\writestatus\m!lua{loaded fonts - \ctxlua{tex.print(fonts.logger.report())}}%
+ \writestatus\m!lua{loaded patterns - \ctxlua{tex.print(languages.logger.report())}}%
+ \writestatus\m!lua{current memory usage - \ctxlua{tex.print(status.luastate_bytes)} bytes}%
+ \writestatus\m!lua{cleaned up reserved nodes - \ctxlua{
+ tex.print(string.format("\letterpercent s nodes, \letterpercent s lists (of \letterpercent s)", nodes.cleanup_reserved(\number\topofboxstack)))
+ }}%
+\to \everybye
+
+% \appendtoks
+% \ctxlua{nodes.check_for_leaks()}%
+% \to \everygoodbye
+
+\appendtoks
+ \writestatus{remark}{temporary fallback to base mode for tlig and trep}% end of font-otf.lua
\to \everybye
\def\resettimer {\ctxlua{environment.starttime = os.clock()}}
@@ -49,10 +88,11 @@
%D For me.
\def\traceluausage
+ {\dosingleempty\dotraceluausage}
+
+\def\dotraceluausage[#1]%
{\ctxlua{debugger.enable()}%
- \appendtoks
- \ctxlua{debugger.disable() debugger.showstats(texio.write,5000)}%
- \to \everybye}
+ \appendtoks\ctxlua{debugger.disable() debugger.showstats(print,\doifnumberelse{#1}{#1}{5000})}\to\everybye}
%D Fonts (experimental AFM loading)
@@ -109,9 +149,9 @@
\definestartstop[randomized][\c!before=\dosetattribute{case}\plusfour,\c!after=]
-\def\WORD{\groupedcommand{\dosetattribute{case}\plusone }{}}
-\def\word{\groupedcommand{\dosetattribute{case}\plustwo }{}}
-\def\Word{\groupedcommand{\dosetattribute{case}\plusthree}{}} % \plusfour
+\def\WORD{\groupedcommand{\setcharactercasing[\plusone ]}{}}
+\def\word{\groupedcommand{\setcharactercasing[\plustwo ]}{}}
+\def\Word{\groupedcommand{\setcharactercasing[\plusthree]}{}} % \plusfour
\let\WORDS\WORD
\let\words\word
@@ -121,3 +161,17 @@
% \expanded{\defineactivecharacter \number"2000E} {\textdir TRT\relax}
% \expanded{\defineactivecharacter \number"2000F} {\textdir TLT\relax}
+
+\startluacode
+ local ss = { }
+ function ctx.writestatus(a,b)
+ local s = ss[a]
+ if not ss[a] then
+ s = a:rpadd(15) .. ":"
+ ss[a] = s
+ end
+ texio.write_nl(s .. b)
+ end
+\stopluacode
+
+\def\writestatus#1#2{\ctxlua{ctx.writestatus([[#1]],[[#2]])}}
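In use the Lua status writer pads and caches the category prefix once per category; for example (rpadd comes from ConTeXt's string helpers):

-- sketch: a second call with the same category reuses the cached, padded prefix
ctx.writestatus("colors","attribute based colors enabled")
ctx.writestatus("colors","spot colors registered")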
diff --git a/tex/context/base/cont-new.tex b/tex/context/base/cont-new.tex
index 03b7ed27c..9e3a4eb37 100644
--- a/tex/context/base/cont-new.tex
+++ b/tex/context/base/cont-new.tex
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2007.09.28 11:58}
+\newcontextversion{2007.12.05 13:56}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
@@ -53,7 +53,7 @@
\def\floatsetupcontent {\copy\nextbox}%
\def\floatsetupwidth {\wd\nextbox}%
\def\floatsetupheight {\ht\nextbox}%
- \def\placesetupfloat[##1]{\placefloat[##1][#2][#3]{\floatsetupcaption}{\floatsetupcontent}}%
+ \def\placesetupfloat[##1]{\placefloat[##1][#2][#3]{#4}{\floatsetupcontent}}% #4 and not \floatsetupcaption (unexpanded)
\dowithnextbox{\setups[#1]}\vbox}
\chardef\baselinegridmode=0 % option in layout / 1=permit_half_lines
@@ -1040,13 +1040,15 @@
\let\normaltype\type
-\beginTEX
- \unexpanded\def\retype#1{\bgroup\convertargument#1\to\ascii\@EA\normaltype\@EA{\ascii}\egroup}
-\endTEX
-
-\beginETEX
- \unexpanded\def\retype#1{\scantokens{\normaltype{#1}\ignorespaces}}
-\endETEX
+\ifx\scantextokens\undefined
+ \ifx\scantokens\undefined
+ \unexpanded\def\retype#1{\bgroup\convertargument#1\to\ascii\@EA\normaltype\@EA{\ascii}\egroup}
+ \else
+ \unexpanded\def\retype#1{\scantokens{\normaltype{#1}\ignorespaces}\relax}
+ \fi
+\else
+ \unexpanded\def\retype#1{\scantextokens{\normaltype{#1}}}
+\fi
\def\simplifytype{\let\type\retype}
diff --git a/tex/context/base/context.tex b/tex/context/base/context.tex
index 2c807f93c..c43b89b72 100644
--- a/tex/context/base/context.tex
+++ b/tex/context/base/context.tex
@@ -42,7 +42,7 @@
%D your styles and modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2007.09.28 11:58}
+\edef\contextversion{2007.12.05 13:56}
%D For those who want to use this:
@@ -372,7 +372,8 @@
\loadcorefile{page-lyr.tex}
\loadcorefile{page-mak.tex}
\loadcorefile{page-num.tex}
-\loadcorefile{page-lin.tex}
+\loadmarkfile{page-lin}
+\loadcorefile{page-par.tex}
\loadcorefile{page-mar.tex}
\loadcorefile{core-job.tex} % why so late?
diff --git a/tex/context/base/core-buf.lua b/tex/context/base/core-buf.lua
index 6277a95ed..081655a72 100644
--- a/tex/context/base/core-buf.lua
+++ b/tex/context/base/core-buf.lua
@@ -162,13 +162,13 @@ function buffers.inspect(name)
if v == "" then
tex.sprint(tex.ctxcatcodes,"[crlf]\\par ")
else
- tex.sprint(tex.ctxcatcodes,(string.gsub("(.)",function(c)
+ tex.sprint(tex.ctxcatcodes,(buffers.data[name]:gsub("(.)",function(c)
return " [" .. string.byte(c) .. "] "
end)) .. "\\par")
end
end
else
- tex.sprint(tex.ctxcatcodes,(string.gsub(buffers.data[name],"(.)",function(c)
+ tex.sprint(tex.ctxcatcodes,(buffers.data[name]:gsub("(.)",function(c)
return " [" .. string.byte(c) .. "] "
end)))
end
@@ -354,7 +354,7 @@ buffers.open_nested = string.rep("\\char"..string.byte('<').." ",2)
buffers.close_nested = string.rep("\\char"..string.byte('>').." ",2)
function buffers.replace_nested(result)
- return (string.gsub(string.gsub(result,buffers.open_nested,"{"),buffers.close_nested,"}"))
+ return (string.gsub(result:gsub(buffers.open_nested,"{"),buffers.close_nested,"}"))
end
function buffers.flush_result(result,nested)
diff --git a/tex/context/base/core-des.tex b/tex/context/base/core-des.tex
index d0152fddc..f11721c96 100644
--- a/tex/context/base/core-des.tex
+++ b/tex/context/base/core-des.tex
@@ -828,9 +828,7 @@
\def\do@@label[#1][#2]%
{\numberparameter{#1}\c!before
\numberparameter{#1}\c!command
- {\doattributes{\@@thenumber{#1}}\c!headstyle\c!headcolor
- {\dotextprefix{\numberparameter{#1}\c!text}%
- \getvalue{\e!next#1}[#2]}}%
+ {\doattributes{\@@thenumber{#1}}\c!headstyle\c!headcolor{\getvalue{\e!next#1}[#2]}}%
\numberparameter{#1}\c!after}%
\def\do@@nextlabel[#1][#2]%
diff --git a/tex/context/base/core-fig.tex b/tex/context/base/core-fig.tex
index 104b753ec..27825ba5c 100644
--- a/tex/context/base/core-fig.tex
+++ b/tex/context/base/core-fig.tex
@@ -1002,6 +1002,14 @@
\def\dogetfiguredimensionsonly[#1][#2]%
{\dogetfiguredimensions[#1][#2]%
\doresetobjects}
+
+\def\doiffigureelse#1%
+ {\getfiguredimensions[#1]%
+ \ifcase\figurewidth
+ \expandafter\secondoftwoarguments
+ \else
+ \expandafter\firstoftwoarguments
+ \fi}
%D Size determination.
%D
@@ -1408,7 +1416,7 @@
\def\doexternalfigure[#1][#2][#3]% [label][file][settings] | [file][settings] | [file][parent][settings]
{\bgroup
\doifelsenothing{#1}
- {\framed[\c!width=\defaultfigurewidth,\c!height=\defaultfigureheight]{external\\figure}}
+ {\framed[\c!width=\defaultfigurewidth,\c!height=\defaultfigureheight]{external\\figure\\no name}}
{\doifundefinedelse{\??ef\??ef#1}
{\useexternalfigure[\s!dummy][#1][#2][#3]%
\getvalue{\??ef\??ef\s!dummy}[]} % [] is dummy arg 5
diff --git a/tex/context/base/core-itm.tex b/tex/context/base/core-itm.tex
index 42d45a5df..97b102e4e 100644
--- a/tex/context/base/core-itm.tex
+++ b/tex/context/base/core-itm.tex
@@ -214,7 +214,7 @@
\def\doinitializeitemgrouplevel#1%
{\copyparameters
[\??op\currentitemgroup#1][\??oo]
- [\c!width,\c!factor,\c!distance,\c!align,\c!option,
+ [\c!width,\c!factor,\c!distance,\c!align,\c!symalign,\c!option,
\c!style,\c!marstyle,\c!symstyle,\c!headstyle,
\c!color,\c!marcolor,\c!symcolor,\c!headcolor,
\c!beforehead,\c!afterhead,\c!before,\c!inbetween,\c!after,
@@ -606,6 +606,16 @@
\ifx\startcolumns\undefined \def\startcolumns[#1]{} \fi
\ifx\stopcolumns \undefined \let\stopcolumns\relax \fi
+\def\dosetsymalign#1% hm, we should use one of the core-spa macros or make a helper
+ {\processaction
+ [#1]
+ [ \v!flushleft=>\let\symalignleft\relax,
+ \v!right=>\let\symalignleft\relax,
+ \v!flushright=>\let\symalignleft\hfill,
+ \v!left=>\let\symalignleft\hfill,
+ \v!middle=>\let\symalignleft\hfil,
+ \v!center=>\let\symalignleft\hfil]}
+
\def\redostartitemgroup[#1][#2]%
{\setfalse\inlinelistitem % new, no indent (leftskip)
\setfalse\concatnextitem % new, concat
@@ -642,6 +652,7 @@
\let\marsymbol\relax
\globallet\doitemdestination\empty
\let\symsymbol\empty
+ \let\symalignleft\relax
\the\itemgroupcommands
\checkcurrentnofitems
% \getitemparameter\itemlevel\empty
@@ -672,6 +683,7 @@
\doadaptrightskip{\getitemparameter1\c!rightmargin}%
\fi
\dosetraggedcommand{\getitemparameter\itemlevel\c!align}\raggedcommand
+ \dosetsymalign{\getitemparameter\itemlevel\c!symalign}%
\doifsomething{\getitemparameter\itemlevel\c!indenting}
{% is \expanded needed?
\expanded{\setupindenting[\getitemparameter\itemlevel\c!indenting]}}%
@@ -772,11 +784,17 @@
\dontrechecknextindentation
\fi
\fi
- \endgroup
- \doglobal\decrement(\itemlevel,\itemincrement)%
- \egroup
- % new needed in sidefloats (surfaced in volker's proceedings)
- \ifconditional\textlistitem\else\par\fi
+ % new test, needed in sidefloats (surfaced in volker's proceedings)
+ \ifconditional\textlistitem % else forgotten
+ \endgroup
+ \doglobal\decrement(\itemlevel,\itemincrement)%
+ \egroup
+ \else
+ \endgroup
+ \doglobal\decrement(\itemlevel,\itemincrement)%
+ \egroup
+ \par
+ \fi
\dorechecknextindentation}
\newtoks\itemgroupcommands
@@ -917,7 +935,7 @@
\else
\scratchdimen\z@
\fi
- \llap{\hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi\box8\hfill}}%
+ \llap{\hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi\box8\hss}}% was: \hfill
\hskip\scratchdimen}
\def\optimizelistitemsbreak
@@ -998,12 +1016,17 @@
\ifconditional\textlistitem
\hbox{\ifconditional\sublistitem+\fi\box8\hskip\interwordspace}\nobreak
\else\ifconditional\inlinelistitem
- \hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi\box8\hfill}%
+ \hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi\box8\hss}% was: \hfill
\else\ifconditional\txtlistitem
\dodotxtitem
\else
% todo: align+marge binnen de hbox
- \llap{\hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi\box8\hfill}}%
+% \llap{\hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi\box8\hfill}}%
+ \llap{\hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi
+ \symalignleft
+ \box8\hfil
+ \hskip\getitemparameter\itemlevel\c!distance% T h
+ }}%
\fi\fi\fi
\fi
\forceunexpanded % needed for m conversion (\os) / i need to look into this
@@ -1249,6 +1272,7 @@
\c!distance=.5em,
%\c!align=\v!normal, % definitely not \v!normal, see mails and
\c!align=, % debug reports of David A & Patrick G on context list
+ \c!symalign=,
\c!color=,
\c!indenting=, % untouched if empty
\c!color=,
diff --git a/tex/context/base/core-lst.tex b/tex/context/base/core-lst.tex
index 7c26b97f7..b73ed388a 100644
--- a/tex/context/base/core-lst.tex
+++ b/tex/context/base/core-lst.tex
@@ -278,7 +278,7 @@
\c!textstyle,\c!textcolor,\c!textcommand,
\c!pagestyle,\c!pagecommand,\c!pagecolor,
\c!numberstyle,\c!numbercolor,\c!numbercommand,
-\c!headnumber,
+ \c!headnumber,
\c!pagenumber,\c!pageboundaries,\c!margin,\c!symbol,\c!limittext,
\c!aligntitle,\c!before,\c!after,\c!inbetween,\v!part\c!number,\c!label]%
\getparameters[\??li#1][#3]}}%
@@ -802,24 +802,45 @@
\midaligned
{}}
+% \def\dodofixdlistelementEFG#1#2#3#4#5#6#7#8% keep this one here as reference
+% {\noindent
+% \hbox
+% {#1% in case E nils the strut
+% \let\\=\newlineinlist
+% \setbox0\hbox
+% {#2{\showcontrastlocation\??ia{#8}%
+% {\dostartlistattributes\c!style\c!color\empty
+% \ignorespaces\dontconvertfont\setstrut
+% \begstrut
+% %\doifelsenothing{\listparameter\c!maxwidth}
+% % {\listparameter\c!textcommand{#6}}
+% % {\listparameter\c!textcommand{\limitatetext{#6}{\listparameter\c!maxwidth}{\unknown}}}%
+% \limitatedlistentry{#6}%
+% \endstrut % struts new
+% \dostoplistattributes}}}%
+% \linklisttoelement{#4}{#7}{#8}{\box0}}%{\copy0}}%
+% \par % should be an option
+% \listparameter\c!inbetween}
+
\def\dodofixdlistelementEFG#1#2#3#4#5#6#7#8%
{\noindent
- \hbox
- {#1% in case E nils the strut
- \let\\=\newlineinlist
- \setbox0\hbox
- {#2{\showcontrastlocation\??ia{#8}%
- {\dostartlistattributes\c!style\c!color\empty
- \ignorespaces\dontconvertfont\setstrut
- \begstrut
- %\doifelsenothing{\listparameter\c!maxwidth}
- % {\listparameter\c!textcommand{#6}}
- % {\listparameter\c!textcommand{\limitatetext{#6}{\listparameter\c!maxwidth}{\unknown}}}%
- \limitatedlistentry{#6}%
- \endstrut % struts new
- \dostoplistattributes}}}%
- \linklisttoelement{#4}{#7}{#8}{\box0}}%{\copy0}}%
- \par % should be an option
+ \bgroup
+ \def\makelistelement##1##2% isolated by Wolfgang Schuster
+ {\doifelse{\listparameter\c!interaction}{##1}
+ {#2{##2}}
+ {\setbox0\hbox{#2{\showcontrastlocation\??ia{#8}{##2}}}%
+ \linklisttoelement{#4}{#7}{#8}{\box0}}}%
+ \makelistelement\v!no
+ {\let\\=\newlineinlist
+ #1% in case E nils the strut (still needed?)
+ \dostartlistattributes\c!style\c!color\empty
+ \ignorespaces\dontconvertfont\setstrut
+ \begstrut
+ \limitatedlistentry{#6}%
+ \endstrut
+ \dostoplistattributes}%
+ \egroup
+ \par
\listparameter\c!inbetween}
% better:
diff --git a/tex/context/base/core-mis.tex b/tex/context/base/core-mis.tex
index 9d3682d69..8459caab2 100644
--- a/tex/context/base/core-mis.tex
+++ b/tex/context/base/core-mis.tex
@@ -1336,6 +1336,14 @@
\c!right={\symbol[\c!rightquote]}]
\definedelimitedtext
+ [\v!blockquote][\v!quotation]
+
+\setupdelimitedtext
+ [\v!blockquote]
+ [\c!left=,
+ \c!right=]
+
+\definedelimitedtext
[\v!speech][\v!quotation]
\setupdelimitedtext
@@ -2605,7 +2613,7 @@
\def\dorotatenextbox#1#2%
{\doifsomething{#1}
- {\edef\@@rorotation{\number#1}% get rid of leading zeros and spaces
+ {\edef\@@rorotation{\realnumber{#1}}% get rid of leading zeros and spaces
\setbox\nextbox\vbox{\flushnextbox}% not really needed
\dodorotatenextbox\@@rorotation#2}%
\hbox{\boxcursor\flushnextbox}}
diff --git a/tex/context/base/core-new.tex b/tex/context/base/core-new.tex
index 9155c9dab..1dd989c37 100644
--- a/tex/context/base/core-new.tex
+++ b/tex/context/base/core-new.tex
@@ -33,10 +33,6 @@
\def\dosetupsB[#1]{\cleanuplabel{#1}\processcommacommand[\cleanlabel]\dosetups} % [..]
\def\dosetupsC[#1]{\cleanuplabel{#1}\dosetups\cleanlabel} % [..]
-% \def\dosetups#1% the grid option will be extended to other main modes
-% {\executeifdefined{\??su\ifgridsnapping\v!grid\fi:#1}
-% {\executeifdefined{\??su :#1}\empty}}
-
\def\dosetups#1% the grid option will be extended to other main modes
{\executeifdefined{\??su\ifgridsnapping\v!grid\fi:#1}
{\executeifdefined{\??su :#1}\gobbleoneargument}\empty} % takes one argument
@@ -44,6 +40,20 @@
\def\setupwithargument#1% the grid option will be extended to other main modes
{\executeifdefined{\??su:#1}\gobbleoneargument}
+% somehow fails ...
+%
+% \letvalue{\??su:..}\gobbleoneargument
+%
+% \def\dosetups#1% the grid option will be extended to other main modes
+% {\csname \??su
+% \ifcsname\??su\ifgridsnapping\v!grid\fi:#1\endcsname\v!grid:#1\else
+% \ifcsname\??su :#1\endcsname :#1\else
+% :..\fi\fi
+% \endcsname\empty} % takes one argument
+%
+% \def\setupwithargument#1% the grid option will be extended to other main modes
+% {\csname\??su:\ifcsname\??su:#1\endcsname#1\else..\fi\endcsname}
+
\let\directsetup\dosetups
\def\doifsetupselse#1% to be done: grid
@@ -54,7 +64,7 @@
\def\startsetups {\xxstartsetups\plusone \stopsetups } \let\stopsetups \relax
\def\startlocalsetups{\xxstartsetups\plusone \stoplocalsetups} \let\stoplocalsetups\relax
\def\startrawsetups {\xxstartsetups\zerocount\stoprawsetups } \let\stoprawsetups \relax
-\def\startxmlsetups {\xxstartsetups\plustwo\stopxmlsetups } \let\stopxmlsetups \relax
+\def\startxmlsetups {\xxstartsetups\plustwo \stopxmlsetups } \let\stopxmlsetups \relax
\def\xxstartsetups#1#2%
{\begingroup\chardef\setupseolmode#1\doifnextcharelse[{\startsetupsA#2}{\startsetupsB#2}}
diff --git a/tex/context/base/core-ntb.tex b/tex/context/base/core-ntb.tex
index a98609d9a..a57739c8b 100644
--- a/tex/context/base/core-ntb.tex
+++ b/tex/context/base/core-ntb.tex
@@ -23,6 +23,19 @@
%D optimizations were rejected in order not to complicate this
%D module too much (and in order to preserve extensibility).
+% \starttext
+% \placefigure[left]{}{}
+% \startlinecorrection \dontleavehmode \bTABLE
+% \bTR \bTD oeps \eTD \eTR
+% \eTABLE
+% \stoplinecorrection
+% \placefigure[right]{}{}
+% \startlinecorrection \dontleavehmode \bTABLE
+% \bTR \bTD oeps \eTD \eTR
+% \eTABLE
+% \stoplinecorrection
+% \stoptext
+
%D To Do:
%D
%D \starttyping
diff --git a/tex/context/base/core-pgr.tex b/tex/context/base/core-pgr.tex
index ce7fb2459..1a4508b7f 100644
--- a/tex/context/base/core-pgr.tex
+++ b/tex/context/base/core-pgr.tex
@@ -498,6 +498,7 @@
gridtype=0,
linetype=1,
filltype=1,
+ dashtype=0,
%snaptops=true, % not that nice: true/false
gridcolor=red,
linecolor=blue,
@@ -506,7 +507,8 @@
linewidth=\linewidth,
gridwidth=\linewidth,
gridshift=\!!zeropoint,
- lineradius=.5\bodyfontsize]
+ lineradius=.5\bodyfontsize,
+ dashtype=1]
\startuseMPgraphic{mpos:par:shape}
\iftracepositions show_par \else draw_par \fi ;
@@ -516,6 +518,7 @@
boxgridtype := \MPvar{gridtype} ;
boxlinetype := \MPvar{linetype} ;
boxfilltype := \MPvar{filltype} ;
+ boxdashtype := \MPvar{dashtype} ;
boxgridcolor := \MPvar{gridcolor} ;
boxlinecolor := \MPvar{linecolor} ;
boxfillcolor := \MPvar{fillcolor} ;
@@ -620,7 +623,7 @@
\newcounter\textbackgrounddepth
\appendtoks
- \savecurrentvalue\totalnofparbackgrounds\nofparbackgrounds
+ \expanded{\savecurrentvalue\noexpand\totalnofparbackgrounds{\number\nofparbackgrounds}}%
\to \everybye
\appendtoks
@@ -628,7 +631,7 @@
\to \everystarttext
\ifx\totalnofparbackgrounds\undefined \newcounter\totalnofparbackgrounds \fi
-\ifx\nofparbackgrounds \undefined \newcounter\nofparbackgrounds \fi
+\ifx\nofparbackgrounds \undefined \newcount \nofparbackgrounds \fi
\def\initializeparbackgrounds
{\ifcase\totalnofparbackgrounds\else
@@ -684,20 +687,18 @@
\def\dostarttextbackground[#1][#2]%
{\checktextbackgrounds
\def\currenttextbackground{#1}%
- \doglobal\increment\nofparbackgrounds
- %\edef\currentparbackground{background:\nofparbackgrounds}%
- \edef\currentparbackground{pbg:\nofparbackgrounds}%
- \bgroup
- \increment\nofparbackgrounds
- %\xdef\nextparbackground{background:\nofparbackgrounds}%
- \xdef\nextparbackground{pbg:\nofparbackgrounds}%
- \egroup
+ \global\advance\nofparbackgrounds\plusone
+ \edef\currentparbackground{pbg:\number\nofparbackgrounds}%
+% \bgroup
+% \advance\nofparbackgrounds\plusone
+% \xdef\nextparbackground{pbg:\number\nofparbackgrounds}%
+% \egroup
+ \xdef\nextparbackground{pbg:\number\numexpr\nofparbackgrounds+\plusone\relax}% still xdef ?
% todo : \synchonizepositionpage{b:\currentparbackground}{s:\currentparbackground}%
\setuptextbackground[#1][#2]%
\let\dodostarttextbackground\relax
\let\dodostoptextbackground \relax
- \doif{\textbackgroundparameter\c!state}\v!start
- {\dopresettextbackground{#1}}%
+ \doif{\textbackgroundparameter\c!state}\v!start{\dopresettextbackground{#1}}%
\dodostarttextbackground}
% todo \backgroundvariable\c!variant
@@ -737,6 +738,7 @@
gridtype=\textbackgroundparameter\c!alternative,
filltype=\textbackgroundparameter\c!background,
linetype=\textbackgroundparameter\c!frame,
+ dashtype=\textbackgroundparameter{dash}, % to be internationalized
gridcolor=\textbackgroundparameter\c!framecolor,
linecolor=\textbackgroundparameter\c!framecolor,
fillcolor=\textbackgroundparameter\c!backgroundcolor,
@@ -946,7 +948,7 @@
\copyparameters[\??td#1][\??td]
[\c!state,\c!location,\c!alternative,\c!mp,\c!method,
\c!background,\c!backgroundcolor,\c!corner,\c!level,
- \c!backgroundoffset,\c!before,\c!after,\c!align,
+ \c!backgroundoffset,\c!before,\c!after,\c!align,dash, % dash not yet internationalized
\c!radius,\c!frame,\c!framecolor,\c!rulethickness,\c!voffset,
\c!leftoffset,\c!rightoffset,\c!topoffset,\c!bottomoffset]%
\getparameters[\??td#1][#2]%
@@ -994,6 +996,7 @@
\c!level=-1,
\c!alternative=0,
\c!align=,
+ dash=0, % to be internationalized
\c!background=\v!color,
\c!backgroundcolor=lightgray,
\c!backgroundoffset=\!!zeropoint,
@@ -1254,90 +1257,46 @@
\newif\ifrepositionmarginbox \repositionmarginboxtrue
-\newcounter\currentmarginpos
-
-% \def\dopositionmarginbox#1%
-% {\bgroup
-% \ifrepositionmarginbox
-% \doglobal\increment\currentmarginpos
-% \setposition{\s!margin:\currentmarginpos}%
-% \scratchdimen=\MPy{\s!margin:\currentmarginpos}%
-% \doglobal\increment\currentmarginpos
-% \advance\scratchdimen by -\MPy{\s!margin:\currentmarginpos}%
-% \advance\scratchdimen by -\strutdp
-% \setbox#1=\hbox
-% {\setposition{\s!margin:\currentmarginpos}\raise\scratchdimen\box#1}%
-% \dp#1=\!!zeropoint
-% \ht#1=\!!zeropoint
-% \fi
-% \vadjust{\box#1}%
-% \egroup}
-
-% \def\dopositionmarginbox#1% how about page boundaries !
-% {\bgroup
-% \ifrepositionmarginbox
-% \doglobal\increment\currentmarginpos
-% \setposition{\s!margin:\currentmarginpos}%
-% \scratchdimen\MPy{\s!margin:\currentmarginpos}%
-% \doglobal\increment\currentmarginpos
-% \advance\scratchdimen -\MPy{\s!margin:\currentmarginpos}%
-% \advance\scratchdimen -\strutdp
-% % new
-% \setbox#1\hbox
-% {\hskip-\MPx{\s!margin:\currentmarginpos}%
-% \hskip\MPx{head:\realfolio}%
-% \box#1}%
-% % so far
-% \setbox#1\hbox
-% {\setposition{\s!margin:\currentmarginpos}%
-% \raise\scratchdimen\box#1}%
-% \dp#1\zeropoint
-% \ht#1\zeropoint
-% \fi
-% \graphicvadjust{\box#1}%
-% \egroup}
+\newcount\currentmarginpos
\def\dopositionmarginbox#1%
{\bgroup
\ifrepositionmarginbox
- \doglobal\increment\currentmarginpos
- \setposition{\s!margin:\currentmarginpos}%
+ \global\advance\currentmarginpos\plusone
+ \setposition{\s!margin:\number\currentmarginpos}%
\ifcase\marginrepositionmethod
% nothing
\or
% nothing
\or
% stack / page check yet untested
-% \scratchcounter\MPp{\s!margin:\currentmarginpos}\relax
- \scratchdimen\MPy{\s!margin:\currentmarginpos}%
- \doglobal\increment\currentmarginpos
- \advance\scratchdimen -\MPy{\s!margin:\currentmarginpos}%
+ \scratchdimen\MPy{\s!margin:\number\currentmarginpos}%
+ \global\advance\currentmarginpos\plusone
+ \advance\scratchdimen -\MPy{\s!margin:\number\currentmarginpos}%
\advance\scratchdimen -\strutdp
-% \ifnum\scratchcounter=\MPp{\s!margin:\currentmarginpos}\relax
% new
\setbox#1\hbox
- {\hskip-\MPx{\s!margin:\currentmarginpos}%
+ {\hskip-\MPx{\s!margin:\number\currentmarginpos}%
\hskip\MPx{head:\realfolio}%
\box#1}%
% so far
\setbox#1\hbox
- {\setposition{\s!margin:\currentmarginpos}%
+ {\setposition{\s!margin:\number\currentmarginpos}%
\raise\scratchdimen\box#1}%
-% \fi
\or
% move up
- \ifnum\MPp{p:\parposcounter}=\MPp{\s!margin:\currentmarginpos}\relax
- \scratchdimen\dimexpr\MPy{p:\parposcounter}-\MPy{\s!margin:\currentmarginpos}\relax
+ \ifnum\MPp{p:\number\parposcounter}=\MPp{\s!margin:\number\currentmarginpos}\relax
+ \scratchdimen\dimexpr\MPy{p:\number\parposcounter}-\MPy{\s!margin:\number\currentmarginpos}\relax
\expanded{\setbox#1\hbox{\raise\scratchdimen\box#1}\ht#1\the\ht#1\dp#1\the\dp#1}%
\fi
\or
% move up, assume end of par
- \ifnum\MPp{p:\parposcounter}=\MPp{\s!margin:\currentmarginpos}\relax
+ \ifnum\MPp{p:\number\parposcounter}=\MPp{\s!margin:\number\currentmarginpos}\relax
\getnoflines\margincontentheight
\advance\noflines\minusone
\scratchdimen\noflines\lineheight
\else
- \scratchdimen\dimexpr\MPy{p:\parposcounter}-\MPy{\s!margin:\currentmarginpos}\relax
+ \scratchdimen\dimexpr\MPy{p:\number\parposcounter}-\MPy{\s!margin:\number\currentmarginpos}\relax
\fi
\expanded{\setbox#1\hbox{\raise\scratchdimen\box#1}\ht#1\the\ht#1\dp#1\the\dp#1}%
\fi
diff --git a/tex/context/base/core-pos.tex b/tex/context/base/core-pos.tex
index 6b0e103fd..ff88efdf8 100644
--- a/tex/context/base/core-pos.tex
+++ b/tex/context/base/core-pos.tex
@@ -116,11 +116,11 @@
%D For postprocessing purposes, we save the number of
%D positions.
-\newcounter\currentpositions % current number of positions
+\newcount\currentpositions % current number of positions
\newcounter\totalnofpositions % total from previous run
\appendtoks
- \savecurrentvalue\totalnofpositions\currentpositions
+ \expanded{\savecurrentvalue\noexpand\totalnofpositions{\the\currentpositions}}%
\to \everybye
%D The next switch can be used to communicate a special
@@ -239,7 +239,7 @@
{\printpaperwidth }%
{\printpaperheight}%
\fi
- \doglobal\increment\currentpositions}
+ \global\advance\currentpositions\plusone}
\def\setpositiononly#1%
{\iftrialtypesetting
@@ -297,7 +297,7 @@
\def\setpositiondataplus#1#2#3#4#5%
{\iftrialtypesetting \else
\initializenextposition
- \hbox to \nextboxwd
+ \hbox % bug: to \nextboxwd
{\edef\currentposition{#1}%
\dosetpositionplus\currentposition
{\the\dimexpr#2\relax}%
@@ -451,20 +451,20 @@
\def\epos#1{\removelastspace\hpos{e:#1}{\strut}}
\def\fpos#1%
- {\setpositionplus{b:#1}\parposcounter\horizontalstrut
+ {\setpositionplus{b:#1}{\number\parposcounter}\horizontalstrut
\ignorespaces}
\def\tpos#1%
{\removelastspace
- \setpositionplus{e:#1}\parposcounter\horizontalstrut}
+ \setpositionplus{e:#1}{\number\parposcounter}\horizontalstrut}
\def\ffpos#1%
- {\setpositionplus{b:#1}\parposcounter\horizontalstrut\wpos{#1}%
+ {\setpositionplus{b:#1}{\number\parposcounter}\horizontalstrut\wpos{#1}%
\ignorespaces}
\def\ttpos#1%
{\removelastspace
- \setpositionplus{e:#1}\parposcounter\horizontalstrut}
+ \setpositionplus{e:#1}{\number\parposcounter}\horizontalstrut}
\def\wpos#1%
{\dontleavehmode\vadjust % may disappear if buried
@@ -503,7 +503,7 @@
%D of them. This mechanism is activated automatically
%D based on information collected in the previous pass.
-\newcounter\parposcounter
+\newcount\parposcounter
\newif\ifpositioningpar
@@ -523,12 +523,12 @@
\chardef\parposstrut=1 % 0 => no strut data, so fall backs used
\def\doregisterparoptions
- {\doglobal\increment\parposcounter
+ {\global\advance\parposcounter\plusone
\begingroup
\leftskip 1\leftskip
\rightskip1\rightskip
\setpositiondataplus
- {p:\parposcounter}% % identifier
+ {p:\number\parposcounter}% identifier
{\the\zeropoint}%
{\the\strutht}%
{\the\strutdp}%
diff --git a/tex/context/base/core-reg.mkiv b/tex/context/base/core-reg.mkiv
index d7dc9a9cb..f4c2cc64c 100644
--- a/tex/context/base/core-reg.mkiv
+++ b/tex/context/base/core-reg.mkiv
@@ -36,13 +36,13 @@
\doglobal\addtocommalist{#1}\allregisters}
\def\mksaveregisterentry#1#2#3#4#5#6#7% class type reference key entry pagespec realpage
- {\expanded{\writeutilitytua{table.insert(jr['#1'],{'#2','#3',\!!bs#4\!!es,\!!bs#5\!!es,'#6','#7'})}}}
+ {\expanded{\writeutilitytua{ti(jr['#1'],{'#2','#3',\!!bs#4\!!es,\!!bs#5\!!es,'#6','#7'})}}}
\def\mksaveregistersee#1#2#3#4#5#6#7% class type reference key entry see pagespec
- {\expanded{\writeutilitytua{table.insert(jr['#1'],{'#2','#3',\!!bs#4\!!es,\!!bs#5\!!es,'#6','#7'})}}}
+ {\expanded{\writeutilitytua{ti(jr['#1'],{'#2','#3',\!!bs#4\!!es,\!!bs#5\!!es,'#6','#7'})}}}
\def\mksaveregistervariable#1#2#3% class type value
- {\expanded{\immediatewriteutilitytua{table.insert(jr['#1'],{'#2','#3'})}}}
+ {\expanded{\immediatewriteutilitytua{ti(jr['#1'],{'#2','#3'})}}}
% Beware, we have no filename support here. For that we need to save the resulting
% tex code in a file. No big deal.
diff --git a/tex/context/base/core-spa.lua b/tex/context/base/core-spa.lua
index 1d8616c3f..9b7486722 100644
--- a/tex/context/base/core-spa.lua
+++ b/tex/context/base/core-spa.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['core-spa'] = {
license = "see context related readme files"
}
+-- todo: test without unset
+
-- vertical space handler
nodes.snapvalues = { }
@@ -20,8 +22,6 @@ do
local kern, glue, penalty, hlist = node.id('kern'), node.id('glue'), node.id('penalty'), node.id('hlist')
- local penalty_node = node.new('penalty')
-
local has_attribute = node.has_attribute
local has_field = node.has_field
@@ -55,7 +55,7 @@ do
-- alignment box begin_of_par vmode_par hmode_par insert penalty before_display after_display
function nodes.is_display_math(head)
- n = head.prev
+ local n = head.prev
while n do
local id = n.id
if id == penalty then
@@ -86,18 +86,6 @@ do
-- helpers
- function nodes.snapline(current,where)
- local sn = has_attribute(current.list,snap_category)
- if sn then
- local sv = nodes.snapvalues[sn]
- if sv then
- local height, depth, lineheight = sv[1], sv[2], sv[3]
- current.height = math.ceil((current.height-height)/lineheight)*lineheight + height
- current.depth = math.ceil((current.depth -depth )/lineheight)*lineheight + depth
- end
- end
- end
-
-- local free = node.free
local line_skip = 1
@@ -112,6 +100,7 @@ do
local function collapser(head,where)
if head and head.next then
+ input.starttiming(nodes)
local trace = nodes.trace_collapse
local current, tail = head, nil
local glue_order, glue_data = 0, nil
@@ -213,8 +202,19 @@ do
if trace then trace_done("before",glue_data) end
glue_order, glue_data = 0, nil
end
- if id == hlist and where == 'hmode_par' and current.list then
- nodes.snapline(current,where) -- will be inline later
+ if id == hlist and where == 'hmode_par' then
+ local list = current.list
+ if list then
+ local sn = has_attribute(list,snap_category)
+ if sn then
+ local sv = nodes.snapvalues[sn]
+ if sv then
+ local height, depth, lineheight = sv[1], sv[2], sv[3]
+ current.height = math.ceil((current.height-height)/lineheight)*lineheight + height
+ current.depth = math.ceil((current.depth -depth )/lineheight)*lineheight + depth
+ end
+ end
+ end
end
current = current.next
end
@@ -230,8 +230,7 @@ do
head, current = nodes.remove(head, parskip, true)
end
if penalty_data then
- local p = node.copy(penalty_node)
- p.penalty = penalty_data
+ local p = nodes.penalty(penalty_data)
if trace then trace_done("before",p) end
head, head = nodes.before(head,head,p)
end
@@ -240,6 +239,7 @@ do
head, tail = nodes.after(head,tail,glue_data)
end
if trace then show_tracing() end
+ input.stoptiming(nodes)
end
return head
end
@@ -247,15 +247,13 @@ do
local head, tail = nil, nil
function nodes.flush_vertical_spacing()
- if head then
- input.start_timing(nodes)
+ if head and head.next then
local t = collapser(head)
head = nil
-- tail = nil
- input.stop_timing(nodes)
return t
else
- return nil
+ return head
end
end
@@ -275,7 +273,6 @@ do
tail = tt
t = nil
else
- input.start_timing(nodes)
if head then
t.prev = tail
tail.next = t
@@ -286,7 +283,6 @@ do
else
t = collapser(t,where)
end
- input.stop_timing(nodes,where)
end
elseif head then
t.prev = tail
@@ -302,7 +298,6 @@ do
function nodes.handle_vbox_spacing(t)
if t and t.next then
- local tail = node.slide(t)
return collapser(t,'whole')
else
return t
@@ -313,8 +308,10 @@ end
-- experimental callback definitions will be moved elsewhere
-callback.register('vpack_filter', nodes.handle_vbox_spacing)
-callback.register('buildpage_filter', nodes.handle_page_spacing)
+-- not yet ... we need to get rid of lastskip stuff first
+--
+-- callback.register('vpack_filter', nodes.handle_vbox_spacing)
+-- callback.register('buildpage_filter', nodes.handle_page_spacing)
-- horizontal stuff
@@ -322,13 +319,8 @@ callback.register('buildpage_filter', nodes.handle_page_spacing)
do
- local kern_node = node.new("kern",1)
- local penalty_node = node.new("penalty")
- local glue_node = node.new("glue")
- local glue_spec_node = node.new("glue_spec")
-
- local contains = node.has_attribute
- local unset = node.unset_attribute
+ local has_attribute = node.has_attribute
+ local unset = node.unset_attribute
local glyph = node.id("glyph")
local kern = node.id("kern")
@@ -337,32 +329,9 @@ do
local hlist = node.id('hlist')
local vlist = node.id('vlist')
---~ function nodes.penalty(p)
---~ local n = node.copy(penalty_node)
---~ n.penalty = p
---~ return n
---~ end
---~ function nodes.kern(k)
---~ local n = node.copy(kern_node)
---~ n.kern = k
---~ return n
---~ end
---~ function nodes.glue(width,stretch,shrink)
---~ local n = node.copy(glue_node)
---~ local s = node.copy(glue_spec_node)
---~ s.width, s.stretch, s.shrink = width, stretch, shrink
---~ n.spec = s
---~ return n
---~ end
---~ function nodes.glue_spec(width,stretch,shrink)
---~ local s = node.copy(glue_spec_node)
---~ s.width, s.stretch, s.shrink = width, stretch, shrink
---~ return s
---~ end
-
spacings = spacings or { }
spacings.mapping = spacings.mapping or { }
- spacings.enabled = true
+ spacings.enabled = false
input.storage.register(false,"spacings/mapping", spacings.mapping, "spacings.mapping")
@@ -380,14 +349,16 @@ do
map.left, map.right = left, right
end
+ -- todo: no ligatures
+
function spacings.process(namespace,attribute,head)
local done, mapping, fontids = false, spacings.mapping, fonts.tfm.id
for start in node.traverse_id(glyph,head) do -- tricky since we inject
- local attr = contains(start,attribute)
+ local attr = has_attribute(start,attribute)
if attr then
local map = mapping[attr]
if map then
- map = mapping[attr][start.char]
+ map = map[start.char]
unset(start,attribute)
if map then
local kern, prev = map.left, start.prev
@@ -414,7 +385,7 @@ do
kerns = kerns or { }
kerns.mapping = kerns.mapping or { }
- kerns.enabled = true
+ kerns.enabled = false
input.storage.register(false, "kerns/mapping", kerns.mapping, "kerns.mapping")
@@ -425,12 +396,12 @@ do
-- local marks = fti[font].shared.otfdata.luatex.marks
-- if not marks[tchar] then
- function kerns.process(namespace,attribute,head) -- todo interchar kerns / disc nodes
+ function kerns.process(namespace,attribute,head) -- todo interchar kerns / disc nodes / can be made faster
local fti, scale = fonts.tfm.id, tex.scale
local start, done, mapping, fontids, lastfont = head, false, kerns.mapping, fonts.tfm.id, nil
while start do
-- faster to test for attr first
- local attr = contains(start,attribute)
+ local attr = has_attribute(start,attribute)
if attr then
unset(start,attribute)
local krn = mapping[attr]
@@ -485,38 +456,87 @@ do
node.insert_before(head,start,nodes.kern(krn))
done = true
elseif pid == disc then
- local d = start.prev
- local pre, post = d.pre, d.post
- if pre then
- local p = d.prev
- local nn, pp = p.prev, p.next
- p.prev, p.next = nil, pre -- hijack node
- pre = kerns.process(namespace,attribute,p)
+ -- probably wrong anyway
+ -- currently this hooks into the node handler before
+ -- hyphenation takes place, but this may change
+ --
+ -- local d = start.prev
+ -- local pre, post = d.pre, d.post
+ -- if pre then
+ -- local p = d.prev
+ -- local nn, pp = p.prev, p.next
+ -- p.prev, p.next = nil, pre -- hijack node
+ -- pre = kerns.process(namespace,attribute,p)
+ -- pre = pre.next
+ -- pre.prev = nil
+ -- p.prev, p.next = nn, pp
+ -- d.pre = pre
+ -- end
+ -- if post then -- more checks needed
+ -- local tail = node.slide(post)
+ -- local nn, pp = d.next.prev, d.next.next
+ -- d.next.next, d.next.prev = nil, tail
+ -- tail.next = start.next -- hijack node
+ -- post = kerns.process(namespace,attribute,post)
+ -- tail.next = nil
+ -- d.next.prev, d.next.next = nn, pp
+ -- d.post = post
+ -- end
+ -- local prevchar, nextchar = d.prev.char, d.next.char -- == start.char
+ -- local tfm = fti[lastfont].characters[prevchar]
+ -- local ickern = tfm.kerns
+ -- if ickern and ickern[nextchar] then
+ -- krn = scale(ickern[nextchar]+fontids[lastfont].parameters[6],krn)
+ -- else
+ -- krn = scale(fontids[lastfont].parameters[6],krn)
+ -- end
+ -- node.insert_before(head,start,nodes.kern(krn))
+ -- d.replace = d.replace + 1
+ --
+ -- untested:
+ --
+ local disc = start.prev -- disc
+ local pre, post, replace = disc.pre, disc.post, disc.replace
+ if pre then -- must pair with start.prev
+ local before = node.copy(disc.prev)
+ pre.prev = before
+ before.next = pre
+ before.prev = nil
+ pre = kerns.process(namespace,attribute,before)
pre = pre.next
pre.prev = nil
- p.prev, p.next = nn, pp
- d.pre = pre
+ disc.pre = pre
+ node.free(before)
end
- if post then
+ if post then -- must pair with start
+ local after = node.copy(disc.next)
local tail = node.slide(post)
- local nn, pp = d.next.prev, d.next.next
- d.next.next, d.next.prev = nil, tail
- tail.next = start.next -- hijack node
+ tail.next = after
+ after.prev = tail
+ after.next = nil
post = kerns.process(namespace,attribute,post)
tail.next = nil
- d.next.prev, d.next.next = nn, pp
- d.post = post
+ disc.post = post
+ node.free(after)
end
- local prevchar, nextchar = d.prev.char, d.next.char -- == start.char
- local tfm = fti[lastfont].characters[prevchar]
- local ickern = tfm.kerns
- if ickern and ickern[nextchar] then
- krn = scale(ickern[nextchar]+fontids[lastfont].parameters[6],krn)
- else
- krn = scale(fontids[lastfont].parameters[6],krn)
+ if replace then -- must pair with start and start.prev
+ local before = node.copy(disc.prev)
+ local after = node.copy(disc.next)
+ local tail = node.slide(post)
+ replace.prev = before
+ before.next = replace
+ before.prev = nil
+ tail.next = after
+ after.prev = tail
+ after.next = nil
+ replace = kerns.process(namespace,attribute,before)
+ replace = replace.next
+ replace.prev = nil
+ tail.next = nil
+ disc.replace = replace
+ node.free(after)
+ node.free(before)
end
- node.insert_before(head,start,nodes.kern(krn))
- d.replace = d.replace + 1
end
end
elseif id == glue and start.subtype == 0 then
@@ -534,7 +554,7 @@ do
start.kern = scale(sk,krn)
done = true
end
- elseif lastfont and id == hlist or id == vlist then -- todo: lookahead
+ elseif lastfont and (id == hlist or id == vlist) then -- todo: lookahead
if start.prev then
node.insert_before(head,start,nodes.kern(scale(fontids[lastfont].parameters[6],krn)))
done = true
@@ -562,7 +582,7 @@ do
-- relocate node and attribute stuff once it's more complete !!
cases = cases or { }
- cases.enabled = true
+ cases.enabled = false
cases.actions = { }
-- hm needs to be run before glyphs: chars.plugins
@@ -581,7 +601,7 @@ do
local function lower(start)
local data, char = characters.data, start.char
if data[char] then
- local ul = data[char].ulcode
+ local lc = data[char].lccode
if lc and fonts.tfm.id[start.font].characters[lc] then
start.char = lc
return start, true
@@ -656,7 +676,7 @@ do
function cases.process(namespace,attribute,head) -- not real fast but also not used on much data
local done, actions = false, cases.actions
for start in node.traverse_id(glyph,head) do
- local attr = contains(start,attribute)
+ local attr = has_attribute(start,attribute)
if attr then
unset(start,attribute)
local action = actions[attr]
@@ -676,7 +696,8 @@ do
breakpoints = breakpoints or { }
breakpoints.mapping = breakpoints.mapping or { }
- breakpoints.enabled = true
+ breakpoints.methods = breakpoints.methods or { }
+ breakpoints.enabled = false
input.storage.register(false,"breakpoints/mapping", breakpoints.mapping, "breakpoints.mapping")
@@ -689,39 +710,84 @@ do
mapping[char] = { kind or 1, before or 1, after or 1 }
end
+ breakpoints.methods[1] = function(head,start)
+ -- no discretionary needed
+ -- \def\prewordbreak {\penalty\plustenthousand\hskip\zeropoint\relax}
+ -- \def\postwordbreak {\penalty\zerocount\hskip\zeropoint\relax}
+ -- texio.write_nl(string.format("injecting replacement type %s for character %s",map[1],utf.char(start.char)))
+ if start.prev and start.next then
+ node.insert_before(head,start,nodes.penalty(10000))
+ node.insert_before(head,start,nodes.glue(0))
+ node.insert_after(head,start,nodes.glue(0))
+ node.insert_after(head,start,nodes.penalty(0))
+ end
+ return head, start
+ end
+ breakpoints.methods[2] = function(head,start) -- ( => (-
+ if start.prev and start.next then
+ local tmp = start
+ start = nodes.disc()
+ start.prev, start.next = tmp.prev, tmp.next
+ tmp.prev.next, tmp.next.prev = start, start
+ tmp.prev, tmp.next = nil, nil
+ start.replace = tmp
+ local tmp, hyphen = node.copy(tmp), node.copy(tmp)
+ hyphen.char = languages.prehyphenchar(tmp.lang)
+ tmp.next, hyphen.prev = hyphen, tmp
+ start.post = tmp
+ node.insert_before(head,start,nodes.penalty(10000))
+ node.insert_before(head,start,nodes.glue(0))
+ node.insert_after(head,start,nodes.glue(0))
+ node.insert_after(head,start,nodes.penalty(10000))
+ end
+ return head, start
+ end
+ breakpoints.methods[3] = function(head,start) -- ) => -)
+ if start.prev and start.next then
+ local tmp = start
+ start = nodes.disc()
+ start.prev, start.next = tmp.prev, tmp.next
+ tmp.prev.next, tmp.next.prev = start, start
+ tmp.prev, tmp.next = nil, nil
+ start.replace = tmp
+ local tmp, hyphen = node.copy(tmp), node.copy(tmp)
+ hyphen.char = languages.prehyphenchar(tmp.lang)
+ tmp.prev, hyphen.next = hyphen, tmp
+ start.pre = hyphen
+ node.insert_before(head,start,nodes.penalty(10000))
+ node.insert_before(head,start,nodes.glue(0))
+ node.insert_after(head,start,nodes.glue(0))
+ node.insert_after(head,start,nodes.penalty(10000))
+ end
+ return head, start
+ end
+
function breakpoints.process(namespace,attribute,head)
local done, mapping, fontids = false, breakpoints.mapping, fonts.tfm.id
local start, n = head, 0
while start do
local id = start.id
if id == glyph then
- local attr = contains(start,attribute)
+ local attr = has_attribute(start,attribute)
if attr then
- unset(start,attribute)
+ unset(start,attribute) -- maybe test for subtype > 256 (faster)
-- look ahead and back n chars
local map = mapping[attr]
if map then
- map = map[start.char]
- if map then
- if n >= map[2] then
- local m = map[3]
+ local smap = map[start.char]
+ if smap then
+ if n >= smap[2] then
+ local m = smap[3]
local next = start.next
while next do -- gamble on same attribute
local id = next.id
if id == glyph then -- gamble on same attribute
- if m == 1 then
- if map[1] == 1 then
- -- no discretionary needed
- -- \def\prewordbreak {\penalty\plustenthousand\hskip\zeropoint\relax}
- -- \def\postwordbreak {\penalty\zerocount\hskip\zeropoint\relax}
- -- texio.write_nl(string.format("injecting replacement type %s for character %s",map[1],utf.char(start.char)))
- local g, p = nodes.glue(0), nodes.penalty(10000)
- node.insert_before(head,start,g)
- node.insert_before(head,g,p)
- g, p = nodes.glue(0), nodes.penalty(0)
- node.insert_after(head,start,p)
- node.insert_after(head,p,g)
- start = g
+ if map[next.char] then
+ break
+ elseif m == 1 then
+ local method = breakpoints.methods[smap[1]]
+ if method then
+ head, start = method(head,start)
done = true
end
break
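On the TeX side each \installbreakpoint entry ends up as mapping[char] = { type, left, right }, and the type indexes breakpoints.methods above. A small data sketch of what the compound set defined in core-spa.mkiv amounts to (the table layout follows the code above; the attribute plumbing is omitted):

-- sketch of the per-attribute breakpoint table consulted by breakpoints.process
local compound = { }
compound[string.byte("+")] = { 1, 3, 3 }  -- type 1: penalty/glue pair around the char
compound[string.byte("/")] = { 1, 3, 3 }
compound[string.byte("(")] = { 2, 3, 3 }  -- type 2: "(" becomes "(-" when broken
compound[string.byte(")")] = { 3, 3, 3 }  -- type 3: ")" becomes "-)" when broken

for char, entry in pairs(compound) do
    -- entry[2]/entry[3]: required number of glyphs before/after the character
    print(string.char(char), "method", entry[1], "left", entry[2], "right", entry[3])
end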
diff --git a/tex/context/base/core-spa.mkiv b/tex/context/base/core-spa.mkiv
index 780fbe3dc..4fcad6b31 100644
--- a/tex/context/base/core-spa.mkiv
+++ b/tex/context/base/core-spa.mkiv
@@ -93,8 +93,10 @@
\endgroup
\fi}
-\def\setcharacterspacing[#1]%
- {\dosetattribute{spacing}{\csname\??ch:#1\endcsname}}
+\def\setcharacterspacing
+ {\ctxlua{spacings.enabled=true}%
+ \gdef\setcharacterspacing[##1]{\dosetattribute{spacing}{\csname\??ch:##1\endcsname}}%
+ \setcharacterspacing}
\setvalue{\??ch:\s!reset}{\doresetattribute{spacing}}
@@ -135,8 +137,10 @@
\endgroup
\fi}
-\def\setcharacterkerning[#1]%
- {\dosetattribute{kern}{\csname\??ck:#1\endcsname}}
+\def\setcharacterkerning
+ {\ctxlua{kerns.enabled=true}%
+ \gdef\setcharacterkerning[##1]{\dosetattribute{kern}{\csname\??ck:##1\endcsname}}%
+ \setcharacterkerning}
\setvalue{\??ck:\s!reset}{\doresetattribute{kern}}
@@ -152,9 +156,14 @@
\defineattribute[case]
-\def\WORD{\groupedcommand{\dosetattribute{case}\plusone }{}}
-\def\word{\groupedcommand{\dosetattribute{case}\plustwo }{}}
-\def\Word{\groupedcommand{\dosetattribute{case}\plusthree}{}} % \plusfour
+\def\setcharactercasing
+ {\ctxlua{cases.enabled=true}%
+ \gdef\setcharactercasing[##1]{\dosetattribute{case}{\number##1}}%
+ \setcharactercasing}
+
+\def\WORD{\groupedcommand{\setcharactercasing[\plusone ]}{}}
+\def\word{\groupedcommand{\setcharactercasing[\plustwo ]}{}}
+\def\Word{\groupedcommand{\setcharactercasing[\plusthree]}{}} % \plusfour
\let\WORDS\WORD
\let\words\word
@@ -176,6 +185,8 @@
\definesystemvariable {bp} % BreakPoint
+\exhyphenchar=\minusone % we use a different order than base tex, so we really need this
+
\newcount \maxbreakpointsid
\def\definebreakpoints
@@ -200,14 +211,22 @@
\endgroup
\fi}
-\def\setbreakpoints[#1]%
- {\dosetattribute{breakpoint}{\csname\??bp:#1\endcsname}}
+\def\setbreakpoints
+ {\ctxlua{breakpoints.enabled=true}%
+ \gdef\setbreakpoints[##1]{\dosetattribute{breakpoint}{\csname\??bp:##1\endcsname}}%
+ \setbreakpoints}
\setvalue{\??bp:\s!reset}{\doresetattribute{breakpoint}}
\definebreakpoints[compound]
\installbreakpoint [compound] [\number`+] [\c!left=3,\c!right=3,\c!type=1]
+\installbreakpoint [compound] [\number`-] [\c!left=3,\c!right=3,\c!type=1]
+\installbreakpoint [compound] [\number`/] [\c!left=3,\c!right=3,\c!type=1]
+\installbreakpoint [compound] [\number`(] [\c!left=3,\c!right=3,\c!type=2]
+\installbreakpoint [compound] [\number`)] [\c!left=3,\c!right=3,\c!type=3]
+
+% \setbreakpoints[compound]
\protect \endinput
diff --git a/tex/context/base/core-spa.tex b/tex/context/base/core-spa.tex
index 36157135a..561c0844e 100644
--- a/tex/context/base/core-spa.tex
+++ b/tex/context/base/core-spa.tex
@@ -2299,16 +2299,41 @@
%D Centered looks nicer:
+% \def\dosetstrut
+% {\let\strut\normalstrut
+% \setbox\strutbox\normalhbox
+% {\normalhbox to \zeropoint
+% {% \hss % new, will be option
+% \vrule
+% \!!width \strutwidth
+% \!!height\strutheight
+% \!!depth \strutdepth
+% \hss}}%
+% \struttotal\dimexpr\strutht+\strutdp\relax}
+%
+% because of all the callbacks in mkiv, we avoid unnecessary boxes ...
+% maybe use an attribute so that we can tag boxes that don't need a
+% treatment; tests using an attribute so far have shown that
+% it's slower because testing the attribute takes time too
+
\def\dosetstrut
{\let\strut\normalstrut
- \setbox\strutbox\normalhbox
- {\normalhbox to \zeropoint
- {% \hss % new, will be option
- \vrule
- \!!width \strutwidth
- \!!height\strutheight
- \!!depth \strutdepth
- \hss}}%
+ \ifdim\strutwidth=\zeropoint
+ \setbox\strutbox\normalhbox
+ {\vrule
+ \!!width \zeropoint
+ \!!height\strutheight
+ \!!depth \strutdepth}%
+ \else
+ \setbox\strutbox\normalhbox
+ {\normalhbox to \zeropoint
+ {% \hss % new, will be option
+ \vrule
+ \!!width \strutwidth
+ \!!height\strutheight
+ \!!depth \strutdepth
+ \hss}}%
+ \fi
\struttotal\dimexpr\strutht+\strutdp\relax}
%D The dimen \type {\struttotal} holds the exact size of the
@@ -2414,7 +2439,7 @@
\fi
\fi\fi}
-\newbox\nostrutbox \setbox\nostrutbox\normalhbox{\normalhbox{}}
+\newbox\nostrutbox \setbox\nostrutbox\normalhbox{} % {\normalhbox{}}
\def\setnostrut
{\setbox\strutbox\copy\nostrutbox
@@ -4505,6 +4530,110 @@
\space
\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi}
+% moved from page-lin
+
+\def\installspacehandler#1#2% needs to set \obeyedspace
+ {\setvalue{\??sr#1}{#2}}
+
+\installspacehandler \v!on
+ {\obeyspaces
+ \def\obeyedspace{\mathortext\normalspace{\dontleavehmode{\tt\controlspace}}}%
+ \let\ =\obeyedspace}
+
+\installspacehandler \v!yes
+ {\obeyspaces
+ \def\obeyedspace{\mathortext\normalspace{\dontleavehmode \normalspace }}%
+ \let\ =\obeyedspace}
+
+\installspacehandler \v!off
+ {\normalspaces
+ \let\obeyedspace\normalspace
+ \let\ =\normalspace}
+
+\installspacehandler \v!fixed
+ {\obeyspaces
+ \def\obeyedspace{\mathortext\normalspace{\dontleavehmode\fixedspace}}%
+ \let\ =\obeyedspace}
+
+\def\activatespacehandler#1%
+ {\executeifdefined{\??sr#1}{\activatespacehandler\v!off}}
+
+% moved from page-lin
+
+%D When spacing is active we need to handle commands in
+%D a special way:
+%D
+%D \starttyping
+%D \setuplines[space=on]
+%D
+%D \startlines
+%D Let's talk about this{\ttsl\gobbleoneargument or}that.
+%D \stoplines
+%D
+%D \startlines
+%D Let's talk about this{\getvalue{ttsl}or}that.
+%D \stoplines
+%D \stoptyping
+%D
+%D One can indent in several ways:
+%D
+%D \starttyping
+%D \setupindenting[medium] \setuplines[indenting=odd] % no yes odd even
+%D
+%D \startlines
+%D first
+%D second
+%D third
+%D fourth
+%D \stoplines
+%D \stoptyping
+
+\def\setuplines
+ {\dodoubleargument\getparameters[\??rg]}
+
+\def\startlines
+ {\@@rgbefore
+ \pushmacro\checkindentation
+ \whitespace
+ %\page[\v!preference]} goes wrong after headings, new: later \nobreak
+ \begingroup
+ \setupindenting[\@@rgindenting]%
+ \typesettinglinestrue
+ \setupwhitespace[\v!none]%
+ \obeylines
+ \ignorespaces
+ \gdef\afterfirstobeyedline % eventually two pass, just like itemizations
+ {\gdef\afterfirstobeyedline
+ {\nobreak
+ \global\let\afterfirstobeyedline\relax}}%
+ \def\obeyedline
+ {\par
+ \afterfirstobeyedline
+ \futurelet\next\dobetweenthelines}%
+ \activatespacehandler\@@rgspace
+ \GotoPar}
+
+\def\stoplines
+ {\endgroup
+ \popmacro\checkindentation
+ \@@rgafter}
+
+\def\dobetweenthelines
+ {\doifmeaningelse\next\obeyedline\@@rginbetween\donothing}
+
+\setuplines
+ [\c!before=\blank,
+ \c!after=\blank,
+ \c!inbetween=\blank,
+ \c!indenting=\v!no,
+ \c!space=\v!default]
+
+\def\emptylines
+ {\dosingleempty\doemptylines}
+
+\def\doemptylines[#1]%
+ {\endgraf\dorecurse{\iffirstargument#1\else3\fi}\crlf}
+
% plugins
\loadmarkfile{core-spa}
diff --git a/tex/context/base/core-syn.lua b/tex/context/base/core-syn.lua
index fc7b72b5d..7f6ea7614 100644
--- a/tex/context/base/core-syn.lua
+++ b/tex/context/base/core-syn.lua
@@ -62,7 +62,7 @@ do
return split
end
- -- for the moment we use the old structure, some day mmiv code
+ -- for the moment we use the old structure, some day mkiv code
-- will be different: more structure, less mess
local template = {
diff --git a/tex/context/base/core-syn.mkiv b/tex/context/base/core-syn.mkiv
index 8996940dd..23385c9e6 100644
--- a/tex/context/base/core-syn.mkiv
+++ b/tex/context/base/core-syn.mkiv
@@ -36,10 +36,10 @@
\doglobal\addtocommalist{#1}\allsortedlists}
\def\mksavesortedlistentry#1#2#3#4% class key entry meaning
- {\immediatewriteutilitytua{table.insert(js['#1'],{'e','#1',\!!bs#2\!!es,\!!bs#3\!!es})}}
+ {\immediatewriteutilitytua{ti(js['#1'],{'e','#2',\!!bs#3\!!es,\!!bs#4\!!es})}}
\def\mksavesortedlistvariable#1#2#3% class type value
- {\immediatewriteutilitytua{table.insert(js['#1'],{'#2','#3'})}}
+ {\immediatewriteutilitytua{ti(js['#1'],{'#2','#3'})}}
\def\mkloadsortedlist#1% class
{\bgroup
diff --git a/tex/context/base/core-tbl.tex b/tex/context/base/core-tbl.tex
index d63aedd1a..8081ff62a 100644
--- a/tex/context/base/core-tbl.tex
+++ b/tex/context/base/core-tbl.tex
@@ -236,73 +236,6 @@
\def\checktabulatesetups
{\getvalue{\@@tabsetups@@\tabulatecolumn}}
-% \def\dodosettabulatepreamble#1#2%
-% {\ifzeropt\tabulatewidth
-% \ifcase\tabulatemodus\relax
-% \let\preamblebox\empty
-% \else
-% \def\preamblebox{\autotabulatetrue}%
-% \fi
-% \else
-% \ifcase\tabulatemodus\relax
-% \edef\preamblebox{\hbox to \the\tabulatewidth}%
-% \else
-% \edef\preamblebox{\hsize\the\tabulatewidth}%
-% \fi
-% \fi
-% %
-% % less bytes
-% %
-% %\edef\preamblebox%
-% % {\ifcase\tabulatewidth
-% % \ifcase\tabulatemodus\relax\else\noexpand\autotabulatetrue\fi
-% % \els
-% % \ifcase\tabulatemodus\relax\hbox to\else\hsize\fi\the\tabulatewidth
-% % \fi}%
-% %
-% % 0 = NC column next EQ equal column
-% % 1 = RC column raw RQ equal column raw
-% % 2 = HC column hook HQ equal column hook
-% % some entries can be left out if we test for them being set
-% \@EA\appendtoks \@EA&\@EA\hskip\pretabskip##&\to\!!toksa
-% \appendtoks \ignorespaces\to\!!toksa
-% %\@EA\appendtoks\@EA\xdef\@EA\tabulatecolumn\@EA{\tabulatecolumns}\to\!!toksa
-% \@EA\appendtoks\@EA\xdef\@EA\tabulatecolumn\@EA{\the\tabulatecolumns}\to\!!toksa
-% \appendtoks \checktabulatesetups\to\!!toksa
-% \appendtoks \checktabulatehook\to\!!toksa
-% \@EA\appendtoks \preamblebox\to\!!toksa
-% \appendtoks \bgroup\bbskip\bgroup#1\to\!!toksa
-% \appendtoks\ifnum\tabulatetype=\plusone \else \to\!!toksa
-% \@EA\appendtoks \the\tabulatebmath\to\!!toksa
-% \@EA\appendtoks \the\tabulatefont\to\!!toksa
-% \@EA\appendtoks \the\tabulatesettings\to\!!toksa
-% \@EA\appendtoks \the\tabulatebefore\to\!!toksa
-% \appendtoks\fi \to\!!toksa
-% \appendtoks \bgroup\ignorespaces\to\!!toksa
-% %
-% \appendtoks \tabulatehook##\to\!!toksa
-% %
-% %%\doifdefinedelse{\@@tabalign@@\tabulatecolumns}
-% %\doifdefinedelse{\@@tabalign@@\the\tabulatecolumns}
-% % {\appendtoks\handletabulatecharalign## \to\!!toksa}
-% % {\appendtoks\tabulatehook ##\to \!!toksa}%
-% % waarom kan ik hier geen \xx{##} geven, om een of
-% % andere reden passeert dan tex de hele regel (incl \NC's)
-% % als argument; elke delimiter <> space gaat trouwens fout
-% \appendtoks \unskip\unskip\ifmmode\else\endgraf\fi\egroup\to\!!toksa
-% \appendtoks\ifnum\tabulatetype=1 \else \to\!!toksa
-% \@EA\appendtoks \the\tabulateafter\to\!!toksa
-% \@EA\appendtoks \the\tabulateemath\to\!!toksa
-% \appendtoks\fi \to\!!toksa
-% \appendtoks #2\egroup\egroup\to\!!toksa
-% \@EA\appendtoks \@EA&\@EA\hskip\postabskip##\to\!!toksa
-% \appendtoks\NC\to\tabulatedummy
-% \let\bbskip\empty
-% \def\pretabskip{.5\tabulateunit}%
-% \let\postabskip\pretabskip
-% \let\gettabulateexit\dogettabulateexit
-% \tabulatewidth\zeropoint}
-
\let\pretabrule \donothing
\let\posttabrule\donothing
diff --git a/tex/context/base/core-two.mkiv b/tex/context/base/core-two.mkiv
index bbe00be92..d4641e024 100644
--- a/tex/context/base/core-two.mkiv
+++ b/tex/context/base/core-two.mkiv
@@ -20,8 +20,8 @@
\immediatewriteutilitytua{local tp = job.twopass}%
\to \everyopenutilities
-\def\immediatesavetwopassdata #1#2#3{\expanded{\immediatewriteutilitytua{table.insert(tp['#1'],"#3")}}}
-\def\savetwopassdata #1#2#3{\expanded{\writeutilitytua {table.insert(tp['#1'],"#3")}}}
+\def\immediatesavetwopassdata #1#2#3{\expanded{\immediatewriteutilitytua{ti(tp['#1'],"#3")}}}
+\def\savetwopassdata #1#2#3{\expanded{\writeutilitytua {ti(tp['#1'],"#3")}}}
\def\immediatesavetaggedtwopassdata#1#2#3#4{\expanded{\immediatewriteutilitytua{tp['#1']['#3']="#4"}}}
\def\savetaggedtwopassdata #1#2#3#4{\expanded{\writeutilitytua {tp['#1']['#3']="#4"}}}
diff --git a/tex/context/base/core-uti.mkiv b/tex/context/base/core-uti.mkiv
index 88f95efed..8059ed69d 100644
--- a/tex/context/base/core-uti.mkiv
+++ b/tex/context/base/core-uti.mkiv
@@ -61,13 +61,14 @@
\immediatewriteutilitytua{if job and job.version and not job.version == "\utilityversion" then return end}%
\immediatewriteutilitytua{if not job then job = { } end}%
\immediatewriteutilitytua{job.version = "\utilityversion"}%
+ \immediatewriteutilitytua{local ti = table.insert}%
\to \everyopenutilities
\appendtoks
\immediatewriteutilitytua{end}%
\immediatewriteutilitytua{}%
\immediatewriteutilitytua{-- end of utility file}%
- %immediate\closeout\utility@tua
+ %\immediate\closeout\utility@tua
\to \everycloseutilities
% The next file can be in lua or luc format:
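With the local alias written once per utility file, the ti(...) calls emitted by the register, sorted-list and two-pass macros above load as ordinary Lua. Roughly what the generated .tua file looks like (entry values are made up; jr and tp stand in for tables set up earlier in that file):

-- illustrative shape of a generated utility (.tua) file after this change
local jr = { index = { } }       -- stands in for the register table set up earlier
local tp = { somelist = { } }    -- stands in for "local tp = job.twopass"
local ti = table.insert          -- the alias now written by \everyopenutilities

ti(jr['index'], { 'e', 'ind', 'key', 'entry', '2', '2' })  -- a register entry
ti(tp['somelist'], "some two pass data")                   -- a two pass entry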
@@ -114,4 +115,8 @@
\ctxlua{input.storage.finalize()}%
\to \everyfinalizeluacode
+\appendtoks
+ \ctxlua{nodes.cleanup_reserved()}%
+\to \everydump
+
\protect \endinput
diff --git a/tex/context/base/core-uti.tex b/tex/context/base/core-uti.tex
index 55cebf673..b91abdd3a 100644
--- a/tex/context/base/core-uti.tex
+++ b/tex/context/base/core-uti.tex
@@ -277,6 +277,20 @@
% we need to pop and push, else problems with reading
% utility files (toc) in xml mode and (e.g.) in a toc
% entry doing a doifmode
+%
+% the following is not ok because we have no way to signal
+% xml content (yet), so for the moment we use this:
+
+\appendtoks
+ \ifprocessingXML
+ \processingXMLfalse
+ \enableXML
+ \catcode`\\=\@@escape
+ \catcode`\{=\@@begingroup
+ \catcode`\}=\@@endgroup
+ \catcode`\%=\@@comment\relax
+ \fi
+\to \everybeforeutilityread
\long\def\doutilities#1#2#3#4#5% % introduceren in utility file
{\resetutilities
diff --git a/tex/context/base/enco-ini.mkiv b/tex/context/base/enco-ini.mkiv
index a676c46aa..45e467252 100644
--- a/tex/context/base/enco-ini.mkiv
+++ b/tex/context/base/enco-ini.mkiv
@@ -21,7 +21,9 @@
characters.context.rehash()
\stopruntimectxluacode
-\ctxlua { characters.context.define() } % redefines all \characters
+\ctxlua {
+ characters.context.define()
+} % redefines all \characters
\useencoding[032,033,037] % fallbacks for some unicode chars, todo
diff --git a/tex/context/base/enco-ini.tex b/tex/context/base/enco-ini.tex
index 202fa38ef..05bec2ba7 100644
--- a/tex/context/base/enco-ini.tex
+++ b/tex/context/base/enco-ini.tex
@@ -641,7 +641,8 @@
\pathypsettings\afterassignment\hyphenation\scratchtoks=}
%D This is not needed for patterns because they are loaded grouped
-%D anyway and it saves us an assignment.
+%D anyway and it saves us an assignment. This can go away now that
+%D patterns are no longer shared.
\def\startpatternloading#1#2#3% % we should use \everypatternloading
{\startreadingfile
diff --git a/tex/context/base/font-afm.lua b/tex/context/base/font-afm.lua
index e6cb9fcd6..b8c2eea59 100644
--- a/tex/context/base/font-afm.lua
+++ b/tex/context/base/font-afm.lua
@@ -19,7 +19,7 @@ away.</p>
fonts = fonts or { }
fonts.afm = fonts.afm or { }
-fonts.afm.version = 1.13 -- incrementing this number one up will force a re-cache
+fonts.afm.version = 1.21 -- incrementing this number one up will force a re-cache
fonts.afm.syncspace = true -- when true, nicer stretch values
fonts.afm.enhance_data = true -- best leave this set to true
fonts.afm.trace_features = false
@@ -35,8 +35,62 @@ fonts.afm.cache = containers.define("fonts", "afm", fonts.afm.version
built in <l n='tfm'/> and <l n='otf'/> reader.</p>
--ldx]]--
+--~ Comment FONTIDENTIFIER LMMATHSYMBOLS10
+--~ Comment CODINGSCHEME TEX MATH SYMBOLS
+--~ Comment DESIGNSIZE 10.0 pt
+--~ Comment CHECKSUM O 4261307036
+--~ Comment SPACE 0 plus 0 minus 0
+--~ Comment QUAD 1000
+--~ Comment EXTRASPACE 0
+--~ Comment NUM 676.508 393.732 443.731
+--~ Comment DENOM 685.951 344.841
+--~ Comment SUP 412.892 362.892 288.889
+--~ Comment SUB 150 247.217
+--~ Comment SUPDROP 386.108
+--~ Comment SUBDROP 50
+--~ Comment DELIM 2390 1010
+--~ Comment AXISHEIGHT 250
+
+do
+
+ local c = lpeg.P("Comment")
+ local s = lpeg.S(" \t")
+ local l = lpeg.S("\n\r")
+ local w = lpeg.C((1 - l)^1)
+ local n = lpeg.C((lpeg.R("09") + lpeg.S("."))^1) / tonumber * s^0
+
+ local fd = { }
+
+ local pattern = ( c * s^1 * (
+ ("CODINGSCHEME" * s^1 * w ) / function(a) end +
+ ("DESIGNSIZE" * s^1 * n * w ) / function(a) fd[ 1] = a end +
+ ("CHECKSUM" * s^1 * n * w ) / function(a) fd[ 2] = a end +
+ ("SPACE" * s^1 * n * "plus" * n * "minus" * n) / function(a,b,c) fd[ 3], fd[ 4], fd[ 5] = a, b, c end +
+ ("QUAD" * s^1 * n ) / function(a) fd[ 6] = a end +
+ ("EXTRASPACE" * s^1 * n ) / function(a) fd[ 7] = a end +
+ ("NUM" * s^1 * n * n * n ) / function(a,b,c) fd[ 8], fd[ 9], fd[10] = a, b, c end +
+ ("DENOM" * s^1 * n * n ) / function(a,b ) fd[11], fd[12] = a, b end +
+ ("SUP" * s^1 * n * n * n ) / function(a,b,c) fd[13], fd[14], fd[15] = a, b, c end +
+ ("SUB" * s^1 * n * n ) / function(a,b) fd[16], fd[17] = a, b end +
+ ("SUPDROP" * s^1 * n ) / function(a) fd[18] = a end +
+ ("SUBDROP" * s^1 * n ) / function(a) fd[19] = a end +
+ ("DELIM" * s^1 * n * n ) / function(a,b) fd[20], fd[21] = a, b end +
+ ("AXISHEIGHT" * s^1 * n ) / function(a) fd[22] = a end +
+ (1-l)^0
+ ) + (1-c)^1)^0
+
+ function fonts.afm.scan_comment(str)
+ fd = { }
+ pattern:match(str)
+ return fd
+ end
+
+end
+
do
+ -- On a rainy day I will rewrite this in lpeg ...
+
local keys = { }
function keys.FontName (data,line) data.fullname = line:strip() end
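A quick usage sketch of the comment scanner above; the slot numbers follow the assignments in the pattern (1 = design size, 6 = quad, 8-10 = num, 22 = axis height). It assumes this file has been loaded in the MkIV Lua instance; the sample lines are taken from the comment block quoted above.

-- usage sketch of fonts.afm.scan_comment (as defined above)
local fd = fonts.afm.scan_comment([[
Comment DESIGNSIZE 10.0 pt
Comment QUAD 1000
Comment NUM 676.508 393.732 443.731
Comment AXISHEIGHT 250
]])
print(fd[1], fd[6], fd[22])    -- 10   1000   250
print(fd[8], fd[9], fd[10])    -- 676.508   393.732   443.731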
@@ -122,7 +176,7 @@ do
logs.report("define font", string.format("getting index data from %s",pfbname))
end
-- local offset = (glyphs[0] and glyphs[0] != .notdef) or 0
- for index, glyph in pairs(glyphs) do -- ipairs? offset
+ for index, glyph in pairs(glyphs) do
local name = glyph.name
if name then
local char = characters[name]
@@ -177,11 +231,15 @@ do
end
data.afmversion = version
get_variables(data,fontmetrics)
+ data.fontdimens = fonts.afm.scan_comment(fontmetrics) -- todo: all lpeg, no time now
return ""
end)
get_indexes(data,filename)
return data
else
+ if fonts.trace then
+ logs.report("define font", "no valid afm file " .. filename)
+ end
return nil
end
end
@@ -218,7 +276,7 @@ end
function fonts.afm.unify(data, filename)
local unicode, private, unicodes = containers.content(fonts.enc.cache,'unicode').hash, 0x0F0000, { }
for name, blob in pairs(data.characters) do
- local code = unicode[name]
+ local code = unicode[name] -- or characters.name_to_unicode[name]
if not code then
code = private
private = private + 1
@@ -310,18 +368,20 @@ end
function fonts.afm.copy_to_tfm(data)
if data and data.characters then
local tfm = { characters = { }, parameters = { } }
- local characters = data.characters
- if characters then
- for k, v in pairs(characters) do
- local t = { }
- t.height = v.boundingbox[4]
- t.depth = - v.boundingbox[2]
- t.width = v.wx
- t.boundingbox = v.boundingbox
- t.index = v.index
- t.name = k
- t.unicode = v.unicode
- tfm.characters[t.unicode] = t
+ local afmcharacters = data.characters
+ local characters, parameters = tfm.characters, tfm.parameters
+ if afmcharacters then
+ for k, v in pairs(afmcharacters) do
+ local b, u = v.boundingbox, v.unicode
+ characters[u] = {
+ height = b[4],
+ depth = - b[2],
+ width = v.wx,
+ boundingbox = b,
+ index = v.index,
+ name = k,
+ unicode = u,
+ }
end
end
tfm.encodingbytes = data.encodingbytes or 2
@@ -340,26 +400,26 @@ function fonts.afm.copy_to_tfm(data)
local spaceunits = 500
tfm.spacer = "500 units"
if data.isfixedpitch then
- if characters['space'] and characters['space'].wx then
- spaceunits, tfm.spacer = characters['space'].wx, "space"
- elseif characters['emdash'] and characters['emdash'].wx then -- funny default
- spaceunits, tfm.spacer = characters['emdash'].wx, "emdash"
+ if afmcharacters['space'] and afmcharacters['space'].wx then
+ spaceunits, tfm.spacer = afmcharacters['space'].wx, "space"
+ elseif afmcharacters['emdash'] and afmcharacters['emdash'].wx then -- funny default
+ spaceunits, tfm.spacer = afmcharacters['emdash'].wx, "emdash"
elseif data.charwidth then
spaceunits, tfm.spacer = data.charwidth, "charwidth"
end
- elseif characters['space'] and characters['space'].wx then
- spaceunits, tfm.spacer = characters['space'].wx, "space"
+ elseif afmcharacters['space'] and afmcharacters['space'].wx then
+ spaceunits, tfm.spacer = afmcharacters['space'].wx, "space"
elseif data.charwidth then
spaceunits, tfm.spacer = data.charwidth, "charwidth variable"
end
spaceunits = tonumber(spaceunits)
- tfm.parameters[1] = 0 -- slant
- tfm.parameters[2] = spaceunits -- space
- tfm.parameters[3] = 500 -- space_stretch
- tfm.parameters[4] = 333 -- space_shrink
- tfm.parameters[5] = 400 -- x_height
- tfm.parameters[6] = 1000 -- quad
- tfm.parameters[7] = 0 -- extra_space (todo)
+ parameters[1] = 0 -- slant
+ parameters[2] = spaceunits -- space
+ parameters[3] = 500 -- space_stretch
+ parameters[4] = 333 -- space_shrink
+ parameters[5] = 400 -- x_height
+ parameters[6] = 1000 -- quad
+ parameters[7] = 0 -- extra_space (todo)
if spaceunits < 200 then
-- todo: warning
end
@@ -367,28 +427,35 @@ function fonts.afm.copy_to_tfm(data)
tfm.ascender = math.abs(data.ascender or 0)
tfm.descender = math.abs(data.descender or 0)
if data.italicangle then
- tfm.parameters[1] = tfm.parameters[1] - math.round(math.tan(data.italicangle*math.pi/180))
+ parameters[1] = parameters[1] - math.round(math.tan(data.italicangle*math.pi/180))
end
if data.isfixedpitch then
- tfm.parameters[3] = 0
- tfm.parameters[4] = 0
+ parameters[3] = 0
+ parameters[4] = 0
elseif fonts.afm.syncspace then
-- too little
- -- tfm.parameters[3] = .2*spaceunits -- space_stretch
- -- tfm.parameters[4] = .1*spaceunits -- space_shrink
+ -- parameters[3] = .2*spaceunits -- space_stretch
+ -- parameters[4] = .1*spaceunits -- space_shrink
-- taco's suggestion:
- -- tfm.parameters[3] = .4*spaceunits -- space_stretch
- -- tfm.parameters[4] = .1*spaceunits -- space_shrink
+ -- parameters[3] = .4*spaceunits -- space_stretch
+ -- parameters[4] = .1*spaceunits -- space_shrink
-- knuthian values: (for the moment compatible)
- tfm.parameters[3] = spaceunits/2 -- space_stretch
- tfm.parameters[4] = spaceunits/3 -- space_shrink
+ parameters[3] = spaceunits/2 -- space_stretch
+ parameters[4] = spaceunits/3 -- space_shrink
end
if data.xheight and data.xheight > 0 then
- tfm.parameters[5] = data.xheight
- elseif tfm.characters['x'] and tfm.characters['x'].height then
- tfm.parameters[5] = tfm.characters['x'].height
+ parameters[5] = data.xheight
+ elseif afmcharacters['x'] and afmcharacters['x'].height then
+ parameters[5] = afmcharacters['x'].height
end
- if table.is_empty(tfm.characters) then
+ local fd = data.fontdimens
+ if fd and fd[8] and fd[9] and fd[10] then
+ -- we're dealing with a tex math font
+ for k,v in pairs(fd) do
+ parameters[k] = v
+ end
+ end
+ if table.is_empty(characters) then
return nil
else
return tfm
@@ -462,12 +529,15 @@ function fonts.afm.afm_to_tfm(specification)
local afmname = specification.filename or specification.name
local encoding, filename = afmname:match("^(.-)%-(.*)$") -- context: encoding-name.*
if encoding and filename and fonts.enc.known[encoding] then
--- only when no bla-name is found
fonts.tfm.set_normal_feature(specification,'encoding',encoding) -- will go away
if fonts.trace then
logs.report("define font", string.format("stripping encoding prefix from filename %s",afmname))
end
afmname = filename
+ elseif specification.forced == "afm" then
+ if fonts.trace then
+ logs.report("define font", string.format("forcing afm format for %s",afmname))
+ end
else
local tfmname = input.findbinfile(texmf.instance,afmname,"ofm") or ""
if tfmname ~= "" then
@@ -561,8 +631,8 @@ function fonts.afm.features.prepare_ligatures(tfmdata,ligatures,value) -- probab
if al then
local ligatures = { }
for k,v in pairs(al) do
- ligatures[charlist[k].index] = {
- char = charlist[v].index,
+ ligatures[charlist[k].unicode] = {
+ char = charlist[v].unicode,
type = 0
}
end
@@ -581,7 +651,7 @@ function fonts.afm.features.prepare_kerns(tfmdata,kerns,value)
if newkerns then
local t = chr.kerns or { }
for k,v in pairs(newkerns) do
- t[charlist[k].index] = v
+ t[charlist[k].unicode] = v
end
chr.kerns = t
end
diff --git a/tex/context/base/font-def.lua b/tex/context/base/font-def.lua
index 141ca9fc6..84fb9d569 100644
--- a/tex/context/base/font-def.lua
+++ b/tex/context/base/font-def.lua
@@ -123,23 +123,56 @@ end
function fonts.tfm.hash_features(specification)
if specification.features then
+ local t = { }
local normal = specification.features.normal
if not table.is_empty(normal) then
- local t = { }
for _, v in pairs(table.sortedkeys(normal)) do
+if v ~= "number" then
t[#t+1] = v .. '=' .. tostring(normal[v])
+end
+ end
+ end
+ local vtf = specification.features.vtf
+ if not table.is_empty(vtf) then
+ for _, v in pairs(table.sortedkeys(vtf)) do
+ t[#t+1] = v .. '=' .. tostring(vtf[v])
end
+ end
+ if next(t) then
return table.concat(t,"+")
end
end
return "unknown"
end
+--~ function fonts.tfm.hash_instance(specification)
+--~ if not specification.hash then
+--~ specification.hash = fonts.tfm.hash_features(specification)
+--~ end
+--~ return specification.hash .. ' @ ' .. tostring(specification.size)
+--~ end
+
+fonts.designsizes = { }
+
+--[[ldx--
+<p>In principle we can share tfm tables when we are in node mode for a font, but then
+we need to define a font switch as an id/attr switch which is no fun, so in that
+case users can best use dynamic features ... so, we will not use that speedup. Okay,
+when we get rid of base mode we can optimize even further by sharing, but then we
+lose our testcases for <l n='luatex'/>.</p>
+--ldx]]--
+
function fonts.tfm.hash_instance(specification)
- if not specification.hash then
- specification.hash = fonts.tfm.hash_features(specification)
+ local hash, size = specification.hash, specification.size
+ if not hash then
+ hash = fonts.tfm.hash_features(specification)
+ specification.hash = hash
+ end
+ if size < 1000 and fonts.designsizes[hash] then
+ size = fonts.tfm.scaled(size, fonts.designsizes[hash])
+ specification.size = size
end
- return specification.hash .. ' @ ' .. tostring(specification.size)
+ return hash .. ' @ ' .. tostring(size)
end
--[[ldx--
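The instance hash is thus the sorted key=value feature string plus the size, where sizes below 1000 are passed through fonts.tfm.scaled together with the design size recorded on the first load (that helper is not shown here). A plain-Lua sketch of the hash format, with made-up feature values:

-- plain Lua sketch of the feature/instance hash format produced above
local function feature_hash(normal)
    local keys = { }
    for k in pairs(normal) do keys[#keys+1] = k end
    table.sort(keys)
    local t = { }
    for _, k in ipairs(keys) do
        if k ~= "number" then                      -- the setup number is excluded
            t[#t+1] = k .. '=' .. tostring(normal[k])
        end
    end
    return (#t > 0 and table.concat(t, "+")) or "unknown"
end

local hash = feature_hash { mode = "node", script = "latn", kern = true }
print(hash)                        -- kern=true+mode=node+script=latn
print(hash .. ' @ ' .. 655360)     -- the instance hash also appends the size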
@@ -185,7 +218,7 @@ specification yet.</p>
function fonts.tfm.read(specification)
garbagecollector.push()
- input.start_timing(fonts)
+ input.starttiming(fonts)
local hash = fonts.tfm.hash_instance(specification)
local tfmtable = fonts.tfm.fonts[hash] -- hashes by size !
if not tfmtable then
@@ -213,8 +246,10 @@ function fonts.tfm.read(specification)
end
end
fonts.tfm.fonts[hash] = tfmtable
+fonts.designsizes[specification.hash] = tfmtable.designsize -- we only know this for sure after loading once
+--~ tfmtable.mode = specification.features.normal.mode or "base"
end
- input.stop_timing(fonts)
+ input.stoptiming(fonts)
garbagecollector.pop()
if not tfmtable then
logs.error("define font",string.format("font with name %s is not found",specification.name))
@@ -319,6 +354,7 @@ end
<p>So far we haven't really dealt with features (or whatever we want
to pass along with the font definition. We distinguish the following
situations:</p>
+situations:</p>
<code>
name:xetex like specs
@@ -338,7 +374,7 @@ end
fonts.define.register_split("@", fonts.define.specify.predefined)
-function fonts.define.specify.colonized(specification)
+function fonts.define.specify.colonized(specification) -- xetex mode
local list = { }
if specification.detail and specification.detail ~= "" then
local expanded_features = { }
@@ -378,29 +414,88 @@ end
fonts.define.register_split(":", fonts.define.specify.colonized)
-fonts.define.specify.context_setups = fonts.define.specify.context_setups or { }
+fonts.define.specify.context_setups = fonts.define.specify.context_setups or { }
+fonts.define.specify.context_numbers = fonts.define.specify.context_numbers or { }
+fonts.define.specify.synonyms = fonts.define.specify.synonyms or { }
-input.storage.register(false,"fonts/setups", fonts.define.specify.context_setups, "fonts.define.specify.context_setups")
+input.storage.register(false,"fonts/setups" , fonts.define.specify.context_setups , "fonts.define.specify.context_setups" )
+input.storage.register(false,"fonts/numbers", fonts.define.specify.context_numbers, "fonts.define.specify.context_numbers")
function fonts.define.specify.preset_context(name,features)
+ local fds = fonts.define.specify
+ local setups, numbers, synonyms = fds.context_setups, fds.context_numbers, fds.synonyms
+ local number = (setups[name] and setups[name].number) or 0
local t = aux.settings_to_hash(features)
for k,v in pairs(t) do
+ k = synonyms[k] or k
t[k] = v:is_boolean()
if type(t[k]) == "nil" then
t[k] = v
end
end
- fonts.define.specify.context_setups[name] = t
+ if number == 0 then
+ numbers[#numbers+1] = name
+ t.number = #numbers
+ else
+ t.number = number
+ end
+ setups[name] = t
+end
+
+--~ function fonts.define.specify.context_number(name)
+--~ local s = fonts.define.specify.context_setups[name]
+--~ return (s and s.number) or -1
+--~ end
+
+do
+
+ -- here we clone features according to languages
+
+ local default = 0
+ local setups = fonts.define.specify.context_setups
+ local numbers = fonts.define.specify.context_numbers
+
+ function fonts.define.specify.context_number(name)
+ local t = setups[name]
+ if not t then
+ return default
+ elseif t.auto then
+ local lng = tonumber(tex.language)
+ local tag = name .. ":" .. lng
+ local s = setups[tag]
+ if s then
+ return s.number or default
+ else
+ local script, language = languages.association(lng)
+ if t.script ~= script or t.language ~= language then
+ local s = table.fastcopy(t)
+ local n = #numbers + 1
+ setups[tag] = s
+ numbers[n] = tag
+ s.number = n
+ s.script = script
+ s.language = language
+ return n
+ else
+ setups[tag] = t
+ return t.number or default
+ end
+ end
+ else
+ return t.number or default
+ end
+ end
+
end
-function fonts.define.specify.context_tostring(name,kind,separator,yes,no,strict)
- return aux.hash_to_string(table.merged(fonts[kind].features.default or {},fonts.define.specify.context_setups[name] or {}),separator,yes,no,strict)
+function fonts.define.specify.context_tostring(name,kind,separator,yes,no,strict,omit)
+ return aux.hash_to_string(table.merged(fonts[kind].features.default or {},fonts.define.specify.context_setups[name] or {}),separator,yes,no,strict,omit)
end
function fonts.define.specify.split_context(features)
if fonts.define.specify.context_setups[features] then
return fonts.define.specify.context_setups[features]
- else
+ else -- ? ? ?
return fonts.define.specify.preset_context("***",features)
end
end
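A short sketch of how a named feature setup gets its dynamic number (the setup name and feature values are made up; this assumes the MkIV Lua environment with this file and its helpers loaded):

-- sketch: register a feature setup, then resolve its dynamic number
fonts.define.specify.preset_context("mycaps", "smcp=yes,kern=yes")
print(fonts.define.specify.context_number("mycaps"))      -- a small positive integer
print(fonts.define.specify.context_number("nosuchsetup")) -- 0, the default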
@@ -454,7 +549,7 @@ function fonts.define.read(name,size,id)
specification = fonts.define.resolve(specification)
local hash = fonts.tfm.hash_instance(specification)
if true then
- local fontdata = containers.read(fonts.cache,hash) -- for tracing purposes
+ --~ local fontdata = containers.read(fonts.cache,hash) -- for tracing purposes
end
local fontdata = fonts.tfm.internalized[hash] -- id
if not fontdata then
@@ -465,7 +560,7 @@ function fonts.define.read(name,size,id)
fonts.tfm.check_virtual_id(fontdata)
end
if true then
- fontdata = containers.write(fonts.cache,hash,fontdata) -- for tracing purposes
+ --~ fontdata = containers.write(fonts.cache,hash,fontdata) -- for tracing purposes
end
if not fonts.tfm.internalized[hash] then
fonts.tfm.id[id] = fontdata
diff --git a/tex/context/base/font-enc.lua b/tex/context/base/font-enc.lua
index 3cc6433b2..2d1005ad2 100644
--- a/tex/context/base/font-enc.lua
+++ b/tex/context/base/font-enc.lua
@@ -95,15 +95,20 @@ end
one.</p>
--ldx]]--
-do
+-- maybe make this a function:
+
+function fonts.enc.make_unicode_vector()
local vector, hash = { }, { }
- for k,v in pairs(characters.data) do
- local a = v.adobename
- if a then
- vector[k], hash[a] = a, k
+ for code, v in pairs(characters.data) do
+ local name = v.adobename
+ if name then
+ vector[code], hash[name] = name, code
else
- vector[k] = '.notdef'
+ vector[code] = '.notdef'
end
end
+ for name, code in pairs(characters.synonyms) do
+ vector[code], hash[name] = name, code
+ end
containers.write(fonts.enc.cache, 'unicode', { name='unicode', tag='unicode', vector=vector, hash=hash })
end
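The rebuilt 'unicode' encoding is just a two-way table: code point to Adobe glyph name in vector, glyph name to code point in hash, with characters.synonyms merged in and '.notdef' for unnamed slots. In terms of data it amounts to something like this (illustrative entries only):

-- illustrative entries of the cached 'unicode' encoding vector/hash pair
local vector, hash = { }, { }
vector[0x0041], hash["A"]        = "A", 0x0041
vector[0x00E9], hash["eacute"]   = "eacute", 0x00E9
vector[0x2026], hash["ellipsis"] = "ellipsis", 0x2026
vector[0x0007]                   = ".notdef"     -- code points without an adobename

print(vector[hash["eacute"]])    -- eacute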
diff --git a/tex/context/base/font-fbk.lua b/tex/context/base/font-fbk.lua
index b81b94309..e5975a38e 100644
--- a/tex/context/base/font-fbk.lua
+++ b/tex/context/base/font-fbk.lua
@@ -111,7 +111,7 @@ end
fonts.vf.aux.combine.force_composed = false
-fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
+ fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
local chars = g.characters
local cap_lly = chars[string.byte("X")].boundingbox[4]
local ita_cor = math.cos(math.rad(90+g.italicangle))
@@ -126,7 +126,10 @@ fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
local cc = c.category
if (cc == 'll') or (cc == 'lu') or (cc == 'lt') then
local acc = s[3]
- chars[i] = table.fastcopy(chars[chr])
+ local t = table.fastcopy(chars[chr])
+t.name = ""
+t.index = i
+t.unicode = i
if chars[acc] then
local cb = chars[chr].boundingbox
local ab = chars[acc].boundingbox
@@ -139,9 +142,10 @@ fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
local dd = (c_urx-c_llx)*ita_cor
if a_ury < 0 then
local dy = cap_lly-a_lly
- chars[i].commands = {
+ t.commands = {
{"push"},
{"right", dx-dd},
+ {"down", -dy}, -- added
{special, red},
{"slot", 1, acc},
{special, black},
@@ -150,7 +154,7 @@ fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
}
elseif c_ury > a_lly then
local dy = cap_lly-a_lly
- chars[i].commands = {
+ t.commands = {
{"push"},
{"right", dx+dd},
{"down", -dy},
@@ -161,7 +165,7 @@ fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
{"slot", 1, chr},
}
else
- chars[i].commands = {
+ t.commands = {
{"push"},
{"right", dx+dd},
{special, blue},
@@ -171,6 +175,7 @@ fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
{"slot", 1, chr},
}
end
+ chars[i] = t
end
end
end
diff --git a/tex/context/base/font-ini.lua b/tex/context/base/font-ini.lua
index d4adf360b..ce4a9e431 100644
--- a/tex/context/base/font-ini.lua
+++ b/tex/context/base/font-ini.lua
@@ -34,6 +34,10 @@ fonts.triggers = {
'script'
}
+fonts.define = fonts.define or { }
+fonts.define.specify = fonts.define.specify or { }
+fonts.define.specify.synonyms = fonts.define.specify.synonyms or { }
+
-- tracing
do
@@ -42,7 +46,7 @@ do
fonts.color.trace = false
- local attribute = attributes.numbers['color'] or 4 -- we happen to know this -)
+ local attribute = attributes.numbers['color'] or 7 -- we happen to know this -)
local mapping = attributes.list[attribute]
local set_attribute = node.set_attribute
diff --git a/tex/context/base/font-ini.mkii b/tex/context/base/font-ini.mkii
index ef2ab85a7..f4ed9893e 100644
--- a/tex/context/base/font-ini.mkii
+++ b/tex/context/base/font-ini.mkii
@@ -16,4 +16,35 @@
\def\mkdefinefontfeature#1% #2
{\setvalue{\??fa#1}} % {#2}
+\def\updatefontparameters
+ {\edef\@@fontencoding{\truefontdata\fontfile \s!encoding}%
+ \edef\@@fontmapping {\truefontdata\fontfile \s!mapping }%
+ \edef\@@fonthandling{\truefontdata\somefontname\s!handling}%
+ \edef\@@fontfeatures{\truefontdata\fontfile \s!features}%
+ \edef\@@fontskewchar{\truefontdata\fontfile \s!skewchar}}
+
+\def\setfontcharacteristics
+ {\updatefontparameters % redundant, will go away, faster too
+ \fastenableencoding
+ {\ifx\@@fontencoding\empty
+ \s!default \else \@@fontencoding
+ \fi}%
+ \fastenablemapping
+ {\ifx\@@fontmapping\empty
+ \ifx\@@fontencoding\empty
+ \s!default \else \@@fontencoding
+ \fi
+ \else
+ \@@fontmapping
+ \fi}%
+ \fastenablehandling
+ {\ifx\@@fonthandling\empty
+ \s!default \else \@@fonthandling
+ \fi}%
+ {\lastfontidentifier}%
+ \the\everyfont
+ \synchronizepatternswithfont}
+
+\ifx\synchronizepatternswithfont\undefined \def\synchronizepatternswithfont{\synchronizepatterns} \fi
+
\protect \endinput
diff --git a/tex/context/base/font-ini.mkiv b/tex/context/base/font-ini.mkiv
index 45ff3480e..86b21fa58 100644
--- a/tex/context/base/font-ini.mkiv
+++ b/tex/context/base/font-ini.mkiv
@@ -22,6 +22,10 @@
\registerctxluafile{font-def}{1.001}
\registerctxluafile{font-fbk}{1.001}
+\startruntimectxluacode
+ fonts.enc.make_unicode_vector()
+\stopruntimectxluacode
+
\unprotect
\def\mkdefinefontfeature#1#2%
@@ -77,6 +81,44 @@
{\dodoubleargument\dofontfeatureslist}
\def\dofontfeatureslist[#1][#2]% todo: arg voor type
- {\ctxlua{tex.sprint(tex.ctxcatcodes,fonts.define.specify.context_tostring("#1","otf","\luaescapestring{#2}","yes","no",true))}}
+ {\ctxlua{tex.sprint(tex.ctxcatcodes,fonts.define.specify.context_tostring("#1","otf","\luaescapestring{#2}","yes","no",true,{"number"}))}}
+
+\def\definefontlocal#1%
+ {\expandafter\font\csname#1\endcsname\lastfontname\relax}
+
+\def\definefontglobal#1%
+ {\global\expandafter\font\csname#1:\endcsname\lastfontname\relax}
+
+\attribute\zerocount\zerocount % first in list, so fast match
+
+% \def\featureattribute#1{\ctxlua{tex.sprint(fonts.define.specify.context_number("#1"))}}
+% \def\setfontfeature #1{\attribute\zerocount\featureattribute{#1}\relax}
+% \def\resetfontfeature#1{\attribute\zerocount\zerocount}
+
+\let\currentfeature\empty
+
+\def\featureattribute#1{\ctxlua{tex.sprint(fonts.define.specify.context_number("#1"))}}
+\def\setfontfeature #1{\edef\currentfeature{#1}\attribute\zerocount\featureattribute{#1}\relax}
+\def\resetfontfeature#1{\let\currentfeature\empty\attribute\zerocount\zerocount}
+
+\appendtoks
+ \setfontfeature\currentfeature
+\to \everylanguage
+
+%D Simpler:
+
+\def\updatefontparameters
+ {\edef\@@fonthandling{\truefontdata\somefontname\s!handling}%
+ \edef\@@fontfeatures{\truefontdata\fontfile \s!features}%
+ \edef\@@fontskewchar{\truefontdata\fontfile \s!skewchar}}
+
+\def\setfontcharacteristics
+ {\fastenablehandling{\ifx\@@fonthandling\empty\s!default\else\@@fonthandling\fi}\lastfontidentifier
+ \the\everyfont}
+
+%D Predefined:
+
+% \installfontfeature[otf][tlig]
+% \installfontfeature[otf][trep]
\protect \endinput
diff --git a/tex/context/base/font-ini.tex b/tex/context/base/font-ini.tex
index 0b8da7756..b15e3812d 100644
--- a/tex/context/base/font-ini.tex
+++ b/tex/context/base/font-ini.tex
@@ -1240,7 +1240,6 @@
\expandafter\dogetglobalfontparameter
\fi}
-\let\@@fontresource\empty
\let\@@fontencoding\empty
\let\@@fontmapping \empty
\let\@@fonthandling\empty
@@ -1322,12 +1321,7 @@
% \definefontsynonym[Serif] [palatinonova-regular*default]
% \definefontsynonym[SerifCaps] [palatinonova-regular*default-caps]
% \stoptypescript
-%
-% \starttypescript [serif] [palatino-nova-regular] [name]
-% \definefontsynonym[Serif] [palatinonova-regular] [resource=palatinonova-regular,features=default]
-% \definefontsynonym[SerifCaps][palatinonova-regular-sc][resource=palatinonova-regular,features=default-caps] % caps
-% \stoptypescript
-%
+
% \definetypeface[mainface][rm][serif][palatino-nova-regular][default] \setupbodyfont[mainface]
%
% \starttext
@@ -1399,14 +1393,12 @@
\def\docheckfontfilename#1*#2#3*#4\relax % class overrules file
{\edef\checkedfontfile{{%
- \ifx\@@fontresource\empty#1\else\@@fontresource\fi
+ #1%
\expandafter\ifx\csname\fontclass\s!features\endcsname\empty
\ifx\@@fontfeatures\empty\ifx#2\empty\else*#2#3\fi\else*\@@fontfeatures\fi
- \else\expandafter\ifx\csname\fontclass\s!features\endcsname\relax % redundant, will go away
- \ifx\@@fontfeatures\empty\ifx#2\empty\else*#2#3\fi\else*\@@fontfeatures\fi
\else
*\csname\fontclass\s!features\endcsname
- \fi\fi
+ \fi
}}%
\doshowcheckedfontfeatures}
@@ -1468,29 +1460,28 @@
\edef\!!stringb{#2}%
\ifx\!!stringb\empty
% no prefix
- \edef\checkedfontfile{\ifx\@@fontresource\empty\!!stringa\else\@@fontresource\fi}%
+ \let\checkedfontfile\!!stringa
\doiffoundxetexfontelse{1a}{\checkedfontfile\checkedfontfeatures}
{\edef\checkedfontfile{\checkedfontfile\checkedfontfeatures}}
{\doiffoundxetexfontelse{1b}{"\checkedfontfile\checkedfontfeatures"}
{\edef\checkedfontfile{"\checkedfontfile\checkedfontfeatures"}}
{\doiffoundxetexfontelse{1c}{"[\checkedfontfile]\checkedfontfeatures"}
{\edef\checkedfontfile{"[\checkedfontfile]\checkedfontfeatures"}}
- {\edef\checkedfontfile{\checkedfontfile}}}}%
+ {}}}%
\else\ifx\!!stringa\v!file
% force file, only file check when no spaces
- \edef\checkedfontfile{\ifx\@@fontresource\empty\!!stringb\else\@@fontresource\fi}%
+ \let\checkedfontfile\!!stringb
\doiffoundxetexfontelse{2b}{"[\checkedfontfile]\checkedfontfeatures"}
{\edef\checkedfontfile{"[\checkedfontfile]\checkedfontfeatures"}}
{\doiffoundxetexfontelse{2c}{"\checkedfontfile\checkedfontfeatures"}
{\edef\checkedfontfile{"\checkedfontfile\checkedfontfeatures"}}
- {\edef\checkedfontfile{\checkedfontfile}}}%
+ {}}%
\else\ifx\!!stringa\v!name
% force name, always lookup by xetex itself, "" forces otf/ttf/type1
- \edef\checkedfontfile{\ifx\@@fontresource\empty\!!stringb\else\@@fontresource\fi}%
- \edef\checkedfontfile{"\checkedfontfile\checkedfontfeatures"}%
+ \edef\checkedfontfile{"\!!stringb\checkedfontfeatures"}%
\else
% whatever, maybe even xetex spec, forget about features
- \edef\checkedfontfile{"\ifx\@@fontresource\empty\!!stringa\!!stringb\else\@@fontresource\fi"}%
+ \edef\checkedfontfile{"\!!stringa\!!stringb"}%
\fi\fi\fi}
\def\checkfontfilename% -- todo: integrate so that we call do.. directly
@@ -1965,7 +1956,6 @@
\edef\currentfontfileencoding{\truefontdata\@@truefontname\s!encoding}%
\edef\currentfontfilemapping {\truefontdata\@@truefontname\s!mapping }%
\edef\currentfontfilehandling{\truefontdata\@@truefontname\s!handling}%
- \edef\currentfontfileresource{\truefontdata\@@truefontname\s!resource}%
\edef\currentfontfilefeatures{\truefontdata\@@truefontname\s!features}}
%D \macros
@@ -3531,37 +3521,8 @@
%D The font specific features are bound to the filename.
-\def\updatefontparameters % can be simpler for mkii (and mkiv no font encoding)
- {\edef\@@fontencoding{\truefontdata\fontfile \s!encoding}%
- \edef\@@fontmapping {\truefontdata\fontfile \s!mapping }%
- \edef\@@fonthandling{\truefontdata\somefontname\s!handling}%
- \edef\@@fontfeatures{\truefontdata\fontfile \s!features}%
- \edef\@@fontresource{\truefontdata\fontfile \s!resource}%
- \edef\@@fontskewchar{\truefontdata\fontfile \s!skewchar}}
-
-\def\setfontcharacteristics
- {\updatefontparameters % redundant, will go away, faster too
- \fastenableencoding
- {\ifx\@@fontencoding\empty
- \s!default \else \@@fontencoding
- \fi}%
- \fastenablemapping
- {\ifx\@@fontmapping\empty
- \ifx\@@fontencoding\empty
- \s!default \else \@@fontencoding
- \fi
- \else
- \@@fontmapping
- \fi}%
- \fastenablehandling
- {\ifx\@@fonthandling\empty
- \s!default \else \@@fonthandling
- \fi}%
- {\lastfontidentifier}%
- \the\everyfont
- \synchronizepatternswithfont}
-
-\ifx\synchronizepatternswithfont\undefined \def\synchronizepatternswithfont{\synchronizepatterns} \fi
+\ifx\updatefontparameters \undefined \let\updatefontparameters \relax \fi
+\ifx\setfontcharacteristics\undefined \let\setfontcharacteristics\relax \fi
%D Experimental:
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index 46ec72aa0..68142e4c4 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -6,27 +6,12 @@ if not modules then modules = { } end modules ['font-otf'] = {
license = "see context related readme files"
}
+--- todo: featuredata is now indexed by kind,lookup but probably lookup is okay too
-- abvf abvs blwf blwm blws dist falt half halt jalt lfbd ljmo
-- mset opbd palt pwid qwid rand rtbd rtla ruby size tjmo twid valt vatu vert
-- vhal vjmo vkna vkrn vpal vrt2
--- otfdata sits in tfmdata / check
-
---~ function string:split_at_space()
---~ local t = { }
---~ for s in self:gmatch("(%S+)") do
---~ t[#t+1] = s
---~ end
---~ return t
---~ end
-
--- beware, the node related functions need to return head, current -- todo
--- we may move marks to components so that parsing is faster
-
--- using for i=1,#t do ... t[i] ... end is much faster than using ipairs
--- copying some functions is faster than sharing code chunks esp here
-
--[[ldx--
<p>This module is sparsely documented because it is a moving target.
The table format of the reader changes and we experiment a lot with
@@ -34,11 +19,20 @@ different methods for supporting features.</p>
<p>As with the <l n='afm'/> code, we may decide to store more information
in the <l n='otf'/> table.</p>
+
+<p>Incrementing the version number will force a re-cache. We jump the
+number by one when there's a fix in the <l n='fontforge'/> library or
+<l n='lua'/> code that results in different tables.</p>
--ldx]]--
+--~ The node based processing functions look quite complex which is mainly due to
+--~ the fact that we need to share data and cache resolved issues (saves much memory and
+--~ is also faster). A further complication is that we support static as well as dynamic
+--~ features.
+
fonts = fonts or { }
fonts.otf = fonts.otf or { }
-fonts.otf.version = 1.64 -- incrementing this number one up will force a re-cache
+fonts.otf.version = 1.73
fonts.otf.tables = fonts.otf.tables or { }
fonts.otf.meanings = fonts.otf.meanings or { }
fonts.otf.enhance_data = false
@@ -49,6 +43,7 @@ fonts.otf.features.data = { }
fonts.otf.features.list = { } -- not (yet) used, otf fonts have gpos/gsub lists
fonts.otf.features.default = { }
fonts.otf.trace_features = false
+fonts.otf.trace_set_features = false
fonts.otf.trace_replacements = false
fonts.otf.trace_contexts = false
fonts.otf.trace_anchors = false
@@ -702,6 +697,10 @@ end
fonts.otf.enhance = fonts.otf.enhance or { }
fonts.otf.enhance.add_kerns = true
+fonts.otf.featurefiles = {
+--~ "texhistoric.fea"
+}
+
function fonts.otf.load(filename,format,sub,featurefile)
local name = file.basename(file.removesuffix(filename))
if featurefile then
@@ -729,13 +728,19 @@ function fonts.otf.load(filename,format,sub,featurefile)
end
if ff then
logs.report("load otf","loading: " .. filename)
- if featurefile then
- featurefile = input.find_file(texmf.instance,file.addsuffix(featurefile,'fea'),"FONTFEATURES")
- if featurefile and featurefile ~= "" then
- logs.report("load otf", "featurefile: " .. featurefile)
- fontforge.apply_featurefile(ff, featurefile)
+ local function load_featurefile(featurefile)
+ if featurefile then
+ featurefile = input.find_file(texmf.instance,file.addsuffix(featurefile,'fea'),"FONTFEATURES")
+ if featurefile and featurefile ~= "" then
+ logs.report("load otf", "featurefile: " .. featurefile)
+ fontforge.apply_featurefile(ff, featurefile)
+ end
end
end
+ for _, featurefile in pairs(fonts.otf.featurefiles) do
+ load_featurefile(featurefile)
+ end
+ load_featurefile(featurefile)
data = fontforge.to_table(ff)
fontforge.close(ff)
if data then
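
A minimal usage sketch (not part of the patch itself): the new fonts.otf.featurefiles list holds feature files that are applied to every loaded font, on top of the per-call featurefile argument; "texhistoric.fea" is the name the patch shows in its commented-out entry.

    fonts = fonts or { }
    fonts.otf = fonts.otf or { }
    fonts.otf.featurefiles = fonts.otf.featurefiles or { }
    -- every call to fonts.otf.load() will now also apply this .fea file,
    -- in addition to any featurefile passed explicitly by the caller
    fonts.otf.featurefiles[#fonts.otf.featurefiles+1] = "texhistoric.fea"
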
@@ -754,19 +759,19 @@ function fonts.otf.load(filename,format,sub,featurefile)
end
end
end
- if data then
- local map = data.map.map
- local backmap = data.map.backmap
- local unicodes = data.luatex.unicodes
- local glyphs = data.glyphs
- -- maybe handy some day, not used
- data.name_to_unicode = function (n) return unicodes[n] end
- data.name_to_index = function (n) return map[unicodes[n]] end
- data.index_to_name = function (i) return glyphs[i].name end
- data.unicode_to_name = function (u) return glyphs[map[u]].name end
- data.index_to_unicode = function (u) return backmap[u] end
- data.unicode_to_index = function (u) return map[u] end
- end
+--~ if data then
+--~ local map = data.map.map
+--~ local backmap = data.map.backmap
+--~ local unicodes = data.luatex.unicodes
+--~ local glyphs = data.glyphs
+--~ -- maybe handy some day, not used
+--~ data.name_to_unicode = function (n) return unicodes[n] end
+--~ data.name_to_index = function (n) return map[unicodes[n]] end
+--~ data.index_to_name = function (i) return glyphs[i].name end
+--~ data.unicode_to_name = function (u) return glyphs[map[u]].name end
+--~ data.index_to_unicode = function (u) return backmap[u] end
+--~ data.unicode_to_index = function (u) return map[u] end
+--~ end
return data
end
@@ -786,36 +791,98 @@ function fonts.otf.enhance.analyze(data,filename)
data.luatex = t
end
-function fonts.otf.load_cidmap(filename)
- local data = io.loaddata(filename)
- if data then
- local unicodes, names = { }, {}
- data = data:gsub("^(%d+)%s+(%d+)\n","")
- for a,b in data:gmatch("(%d+)%s+([%d%a]+)\n") do
- unicodes[tonumber(a)] = tonumber(b,16)
- end
- for a,b,c in data:gmatch("(%d+)%.%.(%d+)%s+([%d%a]+)%s*\n") do
- c = tonumber(c,16)
- for i=tonumber(a),tonumber(b) do
- unicodes[i] = c
- c = c + 1
- end
- end
- for a,b in data:gmatch("(%d+)%s+\/(%S+)%s*\n") do
- names[tonumber(a)] = b
- end
- local supplement, registry, ordering = filename:match("^(.-)%-(.-)%-()%.(.-)$")
- return {
- supplement = supplement,
- registry = registry,
- ordering = ordering,
- filename = filename,
- unicodes = unicodes,
- names = names
- }
- else
- return nil
+--~ function fonts.otf.load_cidmap(filename) -- lpeg
+--~ local data = io.loaddata(filename)
+--~ if data then
+--~ local unicodes, names = { }, {}
+--~ data = data:gsub("^(%d+)%s+(%d+)\n","")
+--~ for a,b in data:gmatch("(%d+)%s+([%d%a]+)\n") do
+--~ unicodes[tonumber(a)] = tonumber(b,16)
+--~ end
+--~ for a,b,c in data:gmatch("(%d+)%.%.(%d+)%s+([%d%a]+)%s*\n") do
+--~ c = tonumber(c,16)
+--~ for i=tonumber(a),tonumber(b) do
+--~ unicodes[i] = c
+--~ c = c + 1
+--~ end
+--~ end
+--~ for a,b in data:gmatch("(%d+)%s+\/(%S+)%s*\n") do
+--~ names[tonumber(a)] = b
+--~ end
+--~ local supplement, registry, ordering = filename:match("^(.-)%-(.-)%-()%.(.-)$")
+--~ return {
+--~ supplement = supplement,
+--~ registry = registry,
+--~ ordering = ordering,
+--~ filename = filename,
+--~ unicodes = unicodes,
+--~ names = names
+--~ }
+--~ else
+--~ return nil
+--~ end
+--~ end
+
+do
+    -- original string parser: 0.109, lpeg parser: 0.036 seconds for Adobe-CNS1-4.cidmap
+ --
+ -- 18964 18964 (leader)
+ -- 0 /.notdef
+ -- 1..95 0020
+ -- 99 3000
+
+ local number = lpeg.C(lpeg.R("09","af","AF")^1)
+ local space = lpeg.S(" \n\r\t")
+ local spaces = space^0
+ local period = lpeg.P(".")
+ local periods = period * period
+ local name = lpeg.P("/") * lpeg.C((1-space)^1)
+
+ local unicodes, names = { }, {}
+
+ local tonumber = tonumber
+
+ function do_one(a,b)
+ unicodes[tonumber(a)] = tonumber(b,16)
+ end
+ function do_range(a,b,c)
+ c = tonumber(c,16)
+ for i=tonumber(a),tonumber(b) do
+ unicodes[i] = c
+ c = c + 1
+ end
+ end
+ function do_name(a,b)
+ names[tonumber(a)] = b
+ end
+
+ grammar = lpeg.P { "start",
+ start = number * spaces * number * lpeg.V("series"),
+ series = (spaces * (lpeg.V("one") + lpeg.V("range") + lpeg.V("named")) )^1,
+ one = (number * spaces * number) / do_one,
+ range = (number * periods * number * spaces * number) / do_range,
+ named = (number * spaces * name) / do_name
+ }
+
+ function fonts.otf.load_cidmap(filename) -- lpeg
+ local data = io.loaddata(filename)
+ if data then
+ unicodes, names = { }, { }
+ grammar:match(data)
+ local supplement, registry, ordering = filename:match("^(.-)%-(.-)%-()%.(.-)$")
+ return {
+ supplement = supplement,
+ registry = registry,
+ ordering = ordering,
+ filename = filename,
+ unicodes = unicodes,
+ names = names
+ }
+ else
+ return nil
+ end
end
+
end
fonts.otf.cidmaps = { }
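
For reference, a self-contained sketch (assuming the lpeg module is available via require) of how the grammar above consumes the sample lines quoted in the comment; the local helpers mirror do_one, do_range and do_name:

    local lpeg = require("lpeg")

    local unicodes, names = { }, { }

    local number  = lpeg.C(lpeg.R("09","af","AF")^1)
    local space   = lpeg.S(" \n\r\t")
    local spaces  = space^0
    local periods = lpeg.P("..")
    local name    = lpeg.P("/") * lpeg.C((1-space)^1)

    local function do_one(a,b)  unicodes[tonumber(a)] = tonumber(b,16) end
    local function do_name(a,b) names[tonumber(a)] = b end
    local function do_range(a,b,c)
        c = tonumber(c,16)
        for i=tonumber(a),tonumber(b) do unicodes[i] = c ; c = c + 1 end
    end

    local grammar = lpeg.P { "start",
        start  = number * spaces * number * lpeg.V("series"),
        series = (spaces * (lpeg.V("one") + lpeg.V("range") + lpeg.V("named")))^1,
        one    = (number * spaces * number) / do_one,
        range  = (number * periods * number * spaces * number) / do_range,
        named  = (number * spaces * name) / do_name,
    }

    grammar:match("18964 18964\n0 /.notdef\n1..95 0020\n99 3000\n")
    -- afterwards: names[0] == ".notdef", unicodes[1] == 0x0020, unicodes[99] == 0x3000
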
@@ -951,11 +1018,19 @@ function fonts.otf.enhance.before(data,filename)
table.compact(subfont.glyphs)
end
end
+
+--~ for index, glyph in pairs(data.glyphs) do
+--~ for k,v in pairs(glyph) do
+--~ if v == 0 then glyph[k] = nil end
+--~ end
+--~ end
+
end
function fonts.otf.enhance.after(data,filename) -- to be split
if fonts.otf.enhance.add_kerns then
local glyphs, mapmap, unicodes = data.glyphs, data.map.map, data.luatex.unicodes
+ local mkdone = false
for index, glyph in pairs(data.glyphs) do
if glyph.kerns then
local mykerns = { } -- unicode indexed !
@@ -977,8 +1052,13 @@ function fonts.otf.enhance.after(data,filename) -- to be split
end
end
glyph.mykerns = mykerns
+ glyph.kerns = nil -- saves space and time
+ mkdone = true
end
end
+ if mkdone then
+ logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables")
+ end
if data.gpos then
for _, gpos in ipairs(data.gpos) do
if gpos.subtables then
@@ -990,7 +1070,8 @@ function fonts.otf.enhance.after(data,filename) -- to be split
local maxfirsts, maxseconds = table.getn(firsts), table.getn(seconds)
logs.report("load otf", string.format("adding kernclass %s with %s times %s pairs)",lookup, maxfirsts, maxseconds))
for fk, fv in pairs(firsts) do
- for first in fv:gmatch("(%S+)") do
+ -- for first in fv:gmatch("([^ ]+)") do
+ for first in fv:gmatch("[^ ]+") do
local glyph = glyphs[mapmap[unicodes[first]]]
local mykerns = glyph.mykerns
if not mykerns then
@@ -1003,7 +1084,8 @@ function fonts.otf.enhance.after(data,filename) -- to be split
mykerns[lookup] = lookupkerns
end
for sk, sv in pairs(seconds) do
- for second in sv:gmatch("(%S+)") do
+ -- for second in sv:gmatch("([^ ]+)") do
+ for second in sv:gmatch("[^ ]+") do
lookupkerns[unicodes[second]] = offsets[(fk-1) * maxseconds + sk]
end
end
@@ -1144,15 +1226,15 @@ function fonts.otf.analyze_unicodes(data)
return unicodes
end
-function fonts.otf.analyze_features(g)
+function fonts.otf.analyze_features(g, features)
if g then
local t, done = { }, { }
- for k,v in ipairs(g) do
- local f = v.features
+ for k=1,#g do
+ local f = features or g[k].features
if f then
- for k, v in ipairs(f) do
+ for k=1,#f do
-- scripts and tag
- local tag = v.tag
+ local tag = f[k].tag
if not done[tag] then
t[#t+1] = tag
done[tag] = true
@@ -1167,9 +1249,18 @@ function fonts.otf.analyze_features(g)
return nil
end
-function fonts.otf.valid_subtable(otfdata,language,script,kind)
- local t = otfdata.luatex.subtables
- return t[kind] and t[kind][script] and t[kind][script][language] and t[kind][script][language].lookups
+function fonts.otf.valid_subtable(otfdata,kind,script,language)
+ local tk = otfdata.luatex.subtables[kind]
+ if tk then
+ local tks = tk[script] or tk.dflt
+ if tks then
+ local tksl = tks[language] or tks.dflt
+ if tksl then
+ return tksl.lookups
+ end
+ end
+ end
+ return false
end
function fonts.otf.features.register(name,default)
@@ -1177,59 +1268,70 @@ function fonts.otf.features.register(name,default)
fonts.otf.features.default[name] = default
end
-function fonts.otf.set_features(tfmdata)
+function fonts.otf.set_features(tfmdata) -- node and base, simple mapping
local shared = tfmdata.shared
local otfdata = shared.otfdata
shared.features = fonts.define.check(shared.features,fonts.otf.features.default)
local features = shared.features
-tfmdata.language = tfmdata.language or 'dflt'
-tfmdata.script = tfmdata.script or 'dflt'
+ local trace = fonts.otf.trace_features or fonts.otf.trace_set_features
+ if not tfmdata.language then tfmdata.language = 'dflt' end
+ if not tfmdata.script then tfmdata.script = 'dflt' end
if not table.is_empty(features) then
local gposlist = otfdata.luatex.gposfeatures
local gsublist = otfdata.luatex.gsubfeatures
local mode = tfmdata.mode or fonts.mode
local fi = fonts.initializers[mode]
- if fi and fi.otf then
- local function initialize(list) -- using tex lig and kerning
- if list then
- for _, f in ipairs(list) do
- local value = features[f]
- if value and fi.otf[f] then -- brr
- if fonts.otf.trace_features then
- logs.report("define otf",string.format("initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown', tfmdata.fullname or 'unknown'))
+        if fi then -- todo: delay initialization for mode 'node'
+ local fiotf = fi.otf
+ if fiotf then
+ local done = { }
+ local function initialize(list) -- using tex lig and kerning
+ if list then
+ for i=1,#list do
+ local f = list[i]
+ local value = features[f]
+ if value and fiotf[f] then -- brr
+ if not done[f] then -- so, we can move some to triggers
+ if trace then
+ logs.report("define otf",string.format("initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown', tfmdata.fullname or 'unknown'))
+ end
+ fiotf[f](tfmdata,value) -- can set mode (no need to pass otf)
+ mode = tfmdata.mode or fonts.mode -- keep this, mode can be set local !
+ fi = fonts.initializers[mode]
+ fiotf = fi.otf
+ done[f] = true
+ end
end
- fi.otf[f](tfmdata,value) -- can set mode (no need to pass otf)
- mode = tfmdata.mode or fonts.mode
- fi = fonts.initializers[mode]
end
end
end
+ initialize(fonts.triggers)
+ initialize(gsublist)
+ initialize(gposlist)
end
- initialize(fonts.triggers)
- initialize(gsublist)
- initialize(gposlist)
end
local fm = fonts.methods[mode]
- if fm and fm.otf then
- local function register(list) -- node manipulations
- if list then
- for _, f in ipairs(list) do
- if features[f] and fm.otf[f] then -- brr
- if fonts.otf.trace_features then
- logs.report("define otf",string.format("installing feature handler %s for mode %s for font %s",f,mode or 'unknown', tfmdata.fullname or 'unknown'))
- end
- if not shared.processors then -- maybe also predefine
- shared.processors = { fm.otf[f] }
- else
- shared.processors[#shared.processors+1] = fm.otf[f]
+ if fm then
+ local fmotf = fm.otf
+ local sp = shared.processors
+ if fmotf then
+ local function register(list) -- node manipulations
+ if list then
+ for i=1,#list do
+ local f = list[i]
+ if features[f] and fmotf[f] then -- brr
+ if trace then
+ logs.report("define otf",string.format("installing feature handler %s for mode %s for font %s",f,mode or 'unknown', tfmdata.fullname or 'unknown'))
+ end
+ sp[#sp+1] = fmotf[f]
end
end
end
end
+ register(fonts.triggers)
+ register(gsublist)
+ register(gposlist)
end
- register(fonts.triggers)
- register(gsublist)
- register(gposlist)
end
end
end
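
A reduced sketch of the initialize-once pattern introduced above (the names here are illustrative, not the patch's API): the done table keeps a feature from being initialized twice when it occurs both in fonts.triggers and in a gsub/gpos list, and mode is re-read after each call because an initializer may switch the font to another mode.

    local function initialize_lists(tfmdata, features, initializers, lists)
        local mode  = tfmdata.mode or "base"
        local fiotf = initializers[mode] and initializers[mode].otf
        local done  = { }
        for l=1,#lists do
            local list = lists[l]
            if list and fiotf then
                for i=1,#list do
                    local f     = list[i]
                    local value = features[f]
                    if value and fiotf[f] and not done[f] then
                        fiotf[f](tfmdata, value)  -- may set tfmdata.mode
                        mode    = tfmdata.mode or mode
                        fiotf   = initializers[mode] and initializers[mode].otf
                        done[f] = true
                    end
                end
            end
        end
    end
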
@@ -1245,12 +1347,33 @@ function fonts.otf.otf_to_tfm(specification)
if not tfmdata then
local otfdata = fonts.otf.load(filename,format,sub,features and features.featurefile)
if not table.is_empty(otfdata) then
+if true then
+ otfdata._shared_ = otfdata._shared_ or { -- aggressive sharing
+ processes = { },
+ lookuptable = { },
+ featuredata = { },
+ featurecache = { },
+ }
+end
tfmdata = fonts.otf.copy_to_tfm(otfdata)
if not table.is_empty(tfmdata) then
- tfmdata.shared = tfmdata.shared or { }
tfmdata.unique = tfmdata.unique or { }
- tfmdata.shared.otfdata = otfdata
- tfmdata.shared.features = features
+ tfmdata.shared = tfmdata.shared or { } -- combine
+ local shared = tfmdata.shared
+ shared.otfdata = otfdata
+ shared.features = features
+ shared.processors = { }
+ shared.dynamics = { }
+ shared.processes = { }
+ shared.lookuptable = { }
+ shared.featuredata = { }
+ shared.featurecache = { }
+ if otfdata._shared_ then
+ shared.processes = otfdata._shared_.processes
+ shared.lookuptable = otfdata._shared_.lookuptable
+ shared.featuredata = otfdata._shared_.featuredata
+ shared.featurecache = otfdata._shared_.featurecache
+ end
fonts.otf.set_features(tfmdata)
end
end
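
A sketch of the sharing set up above, reduced to its essentials (attach_shared is a hypothetical helper, not in the patch): every tfmdata derived from the same otfdata points at one set of process/lookup/feature caches, so a face used at several sizes prepares its features only once.

    local function attach_shared(otfdata, tfmdata)
        local s = otfdata._shared_
        if not s then
            s = { processes = { }, lookuptable = { }, featuredata = { }, featurecache = { } }
            otfdata._shared_ = s
        end
        tfmdata.shared = tfmdata.shared or { }
        local shared = tfmdata.shared
        shared.processes,   shared.lookuptable  = s.processes,   s.lookuptable
        shared.featuredata, shared.featurecache = s.featuredata, s.featurecache
        return tfmdata
    end
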
@@ -1264,21 +1387,24 @@ function fonts.otf.features.prepare_base_kerns(tfmdata,kind,value) -- todo what
local otfdata = tfmdata.shared.otfdata
local charlist = otfdata.glyphs
local unicodes = otfdata.luatex.unicodes
- local somevalid = fonts.otf.some_valid_feature(otfdata,tfmdata.language,tfmdata.script,kind)
+ local somevalid = fonts.otf.some_valid_feature(otfdata,kind,tfmdata.script,tfmdata.language)
for _, chr in pairs(tfmdata.characters) do
local d = charlist[chr.index]
- if d and d.kerns then
- local t, done = chr.kerns or { }, false
- for _, v in pairs(d.kerns) do
- if somevalid[v.lookup] then
- local k = unicodes[v.char]
- if k > 0 then
- t[k], done = v.off, true
+ if d then
+ local dk = d.kerns
+ if dk then
+ local t, done = chr.kerns or { }, false
+ for _, v in pairs(dk) do
+ if somevalid[v.lookup] then
+ local k = unicodes[v.char]
+ if k > 0 then
+ t[k], done = v.off, true
+ end
end
end
- end
- if done then
- chr.kerns = t
+ if done then
+ chr.kerns = t -- no empty assignments
+ end
end
end
end
@@ -1290,25 +1416,32 @@ function fonts.otf.copy_to_tfm(data)
local tfm = { characters = { }, parameters = { } }
local unicodes = data.luatex.unicodes
local characters = tfm.characters
+ local parameters = tfm.parameters
local force = fonts.otf.notdef
+ local zerobox = { 0, 0, 0, 0 }
+ local glyphs = data.glyphs
for k,v in pairs(data.map.map) do
-- k = unicode, v = slot
- local d = data.glyphs[v]
- if d and (force or d.name) then
- local t = {
- index = v,
- unicode = k,
- name = d.name or ".notdef",
- boundingbox = d.boundingbox or nil,
- width = d.width or 0,
- height = d.boundingbox[4] or 0,
- depth = - d.boundingbox[2] or 0,
- class = d.class,
- }
- if d.class == "mark" then
- t.width = - t.width
- end
- characters[k] = t
+ local d = glyphs[v]
+ if d then
+ local name = d.name
+ if force or name then
+ local b = d.boundingbox or zerobox
+ local w = d.width or 0
+ if d.class == "mark" then
+ w = - w
+ end
+ characters[k] = {
+ index = v,
+ unicode = k,
+ name = name or ".notdef",
+ boundingbox = b,
+ width = w,
+ height = b[4],
+ depth = - b[2],
+ class = d.class,
+ }
+ end
end
end
local designsize = data.designsize or data.design_size or 100
@@ -1319,7 +1452,7 @@ function fonts.otf.copy_to_tfm(data)
tfm.units = data.units_per_em or 1000
-- we need a runtime lookup because of running from cdrom or zip, brrr
tfm.filename = input.findbinfile(texmf.instance,data.luatex.filename,"") or data.luatex.filename
- tfm.fullname = data.fullname or data.fontname
+ tfm.fullname = data.fontname or data.fullname
tfm.encodingbytes = 2
tfm.cidinfo = data.cidinfo
tfm.cidinfo.registry = tfm.cidinfo.registry or ""
@@ -1359,13 +1492,13 @@ function fonts.otf.copy_to_tfm(data)
end
end
spaceunits = tonumber(spaceunits) or tfm.units/2 -- 500 -- brrr
- tfm.parameters[1] = 0 -- slant
- tfm.parameters[2] = spaceunits -- space
- tfm.parameters[3] = tfm.units/2 -- 500 -- space_stretch
- tfm.parameters[4] = 2*tfm.units/3 -- 333 -- space_shrink
- tfm.parameters[5] = 4*tfm.units/5 -- 400 -- x_height
- tfm.parameters[6] = tfm.units -- 1000 -- quad
- tfm.parameters[7] = 0 -- extra_space (todo)
+ parameters[1] = 0 -- slant
+ parameters[2] = spaceunits -- space
+ parameters[3] = tfm.units/2 -- 500 -- space_stretch
+ parameters[4] = 2*tfm.units/3 -- 333 -- space_shrink
+ parameters[5] = 4*tfm.units/5 -- 400 -- x_height
+ parameters[6] = tfm.units -- 1000 -- quad
+ parameters[7] = 0 -- extra_space (todo)
if spaceunits < 2*tfm.units/5 then
-- todo: warning
end
@@ -1373,21 +1506,21 @@ function fonts.otf.copy_to_tfm(data)
tfm.ascender = math.abs(data.ascent or 0)
tfm.descender = math.abs(data.descent or 0)
if data.italicangle then -- maybe also in afm _
- tfm.parameters[1] = tfm.parameters[1] - math.round(math.tan(data.italicangle*math.pi/180))
+ parameters[1] = parameters[1] - math.round(math.tan(data.italicangle*math.pi/180))
end
if data.isfixedpitch then
- tfm.parameters[3] = 0
- tfm.parameters[4] = 0
+ parameters[3] = 0
+ parameters[4] = 0
elseif fonts.otf.syncspace then --
- tfm.parameters[3] = spaceunits/2 -- space_stretch
- tfm.parameters[4] = spaceunits/3 -- space_shrink
+ parameters[3] = spaceunits/2 -- space_stretch
+ parameters[4] = spaceunits/3 -- space_shrink
end
if data.pfminfo and data.pfminfo.os2_xheight and data.pfminfo.os2_xheight > 0 then
- tfm.parameters[5] = data.pfminfo.os2_xheight
+ parameters[5] = data.pfminfo.os2_xheight
else
local x = characters[unicodes['x']]
if x then
- tfm.parameters[5] = x.height
+ parameters[5] = x.height
end
end
-- [6]
@@ -1421,72 +1554,111 @@ function fonts.tfm.read_from_open_type(specification)
return tfmtable
end
+function fonts.otf.analyze_only(otfdata)
+ local analyze = fonts.otf.analyze_features
+ return analyze(otfdata.gpos), analyze(otfdata.gsub)
+end
+
+local a_to_script = { }
+local a_to_language = { }
+
+do
+
+ local context_setups = fonts.define.specify.context_setups
+ local context_numbers = fonts.define.specify.context_numbers
+
+ function fonts.otf.set_dynamics(tfmdata,attribute,features) --currently experimental and slow / hackery
+ local shared = tfmdata.shared
+ local dynamics = shared.dynamics
+ if dynamics then
+ features = features or context_setups[context_numbers[attribute]]
+ if features then
+ local script = features.script or 'dflt'
+ local language = features.language or 'dflt'
+ local ds = dynamics[script]
+ if not ds then
+ ds = { }
+ dynamics[script] = ds
+ end
+ local dsl = ds[language]
+ if not dsl then
+ dsl = { }
+ ds[language] = dsl
+ end
+ local dsla = dsl[attribute]
+ if dsla then
+ return dsla
+ else
+ a_to_script [attribute] = script
+ a_to_language[attribute] = language
+ dsla = { }
+ local otfdata = shared.otfdata
+ local methods = fonts.methods.node.otf
+ local initializers = fonts.initializers.node.otf
+ local gposfeatures, gsubfeatures = fonts.otf.analyze_only(otfdata,features)
+ local default = fonts.otf.features.default
+ local function register(list)
+ if list then
+ for i=1,#list do
+ local f = list[i]
+ local value = features[f] or default[f]
+ if value then
+ local i, m = initializers[f], methods[f]
+ if i then
+ i(tfmdata,value)
+ end
+ if m then
+ dsla[#dsla+1] = m
+ end
+ end
+ end
+ end
+ end
+ register(fonts.triggers)
+ register(gsubfeatures)
+ register(gposfeatures)
+ dynamics[script][language][attribute] = dsla
+ return dsla
+ end
+ end
+ end
+ return { } -- todo: false
+ end
+
+end
+
-- scripts
fonts.otf.default_language = 'latn'
fonts.otf.default_script = 'dflt'
---~ function fonts.otf.valid_feature(otfdata,language,script) -- return hash is faster
---~ local language = language or fonts.otf.default_language
---~ local script = script or fonts.otf.default_script
---~ if not (script and language) then
---~ return boolean.alwaystrue
---~ else
---~ language = string.padd(language:lower(),4)
---~ script = string.padd(script:lower (),4)
---~ local t = { }
---~ for k,v in pairs(otfdata.luatex.subtables) do
---~ local vv = v[script]
---~ if vv and vv[language] then
---~ t[k] = vv[language].valid
---~ end
---~ end
---~ local always = otfdata.luatex.always_valid -- for the moment not per feature
---~ --~ return function(kind,tag) -- is the kind test needed
---~ --~ return always[tag] or (kind and t[kind] and t[kind][tag])
---~ --~ end
---~ return function(kind,tag) -- better inline
---~ return always[tag] or (t[kind] and t[kind][tag])
---~ end
---~ end
---~ end
-
-function fonts.otf.valid_feature(otfdata,language,script,feature) -- return hash is faster
- local language = language or fonts.otf.default_language
+function fonts.otf.valid_feature(otfdata,kind,script,language) -- return hash is faster
local script = script or fonts.otf.default_script
+ local language = language or fonts.otf.default_language
if not (script and language) then
return true
else
- language = string.padd(language:lower(),4)
- script = string.padd(script:lower (),4)
---~ local t = { }
---~ for k,v in pairs(otfdata.luatex.subtables) do
---~ local vv = v[script]
---~ if vv and vv[language] then
---~ t[k] = vv[language].valid
---~ end
---~ end
- local ft = otfdata.luatex.subtables[feature]
+ script, language = script:lower(), language:lower() -- will go away, we will lowercase values
+ local ft = otfdata.luatex.subtables[kind]
local st = ft[script]
return false, otfdata.luatex.always_valid, st and st[language] and st[language].valid
end
end
-function fonts.otf.some_valid_feature(otfdata,language,script,kind)
- local language = language or fonts.otf.default_language
+function fonts.otf.some_valid_feature(otfdata,kind,script,language)
local script = script or fonts.otf.default_script
+ local language = language or fonts.otf.default_language
if not (script and language) then
return boolean.alwaystrue
else
- language = string.padd(language:lower(),4)
- script = string.padd(script:lower (),4)
+ script, language = script:lower(), language:lower() -- will go away, we will lowercase values
local t = otfdata.luatex.subtables[kind]
if t and t[script] and t[script][language] and t[script][language].valid then
return t[script][language].valid
else
return { }
end
---~ return (t and t[script][language] and t[script][language].valid) or { }
+ -- return (t and t[script] and t[script][language] and t[script][language].valid) or { }
end
end
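
The dynamics table that set_dynamics fills is a lazily built three-level cache keyed by script, language and feature attribute; a minimal sketch of just that memoization (build stands in for the register() work above and is a placeholder):

    local dynamics = { }

    local function get_dynamic(script, language, attribute, build)
        local ds = dynamics[script]
        if not ds then ds = { } ; dynamics[script] = ds end
        local dsl = ds[language]
        if not dsl then dsl = { } ; ds[language] = dsl end
        local dsla = dsl[attribute]
        if not dsla then
            dsla = build(script, language, attribute)  -- computed once per combination
            dsl[attribute] = dsla
        end
        return dsla
    end

    -- later calls with the same (script,language,attribute) return the cached table
    local processors = get_dynamic("latn", "dflt", 1, function() return { } end)
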
@@ -1497,6 +1669,7 @@ function fonts.otf.features.aux.resolve_ligatures(tfmdata,ligatures,kind)
local changed = tfmdata.changed or { }
local done = { }
kind = kind or "unknown"
+ local trace = fonts.otf.trace_features
while true do
local ok = false
for k,v in pairs(ligatures) do
@@ -1507,18 +1680,22 @@ function fonts.otf.features.aux.resolve_ligatures(tfmdata,ligatures,kind)
local c, f, s = chars[v[2]], ligs[1], ligs[2]
local uf, us = unicodes[f], unicodes[s]
if changed[uf] or changed[us] then
- if fonts.otf.trace_features then
+ if trace then
logs.report("define otf",string.format("%s: %s (%s) + %s (%s) ignored",kind,f,uf,s,us))
end
else
local first, second = chars[uf], us
if first and second then
- if not first.ligatures then first.ligatures = { } end
- first.ligatures[second] = {
+ local t = first.ligatures
+ if not t then
+ t = { }
+ first.ligatures = t
+ end
+ t[second] = {
char = unicodes[c.name],
type = 0
}
- if fonts.otf.trace_features then
+ if trace then
logs.report("define otf",string.format("%s: %s (%s) + %s (%s) = %s (%s)",kind,f,uf,s,us,c.name,unicodes[c.name]))
end
end
@@ -1549,15 +1726,15 @@ function fonts.otf.features.prepare_base_substitutions(tfmdata,kind,value) -- we
local unicodes = otfdata.luatex.unicodes
local trace = fonts.otf.trace_features
local chars = tfmdata.characters
- local somevalid = fonts.otf.some_valid_feature(otfdata,tfmdata.language,tfmdata.script,kind)
+ local somevalid = fonts.otf.some_valid_feature(otfdata,kind,tfmdata.script,tfmdata.language)
tfmdata.changed = tfmdata.changed or { }
local changed = tfmdata.changed
+ local glyphs = otfdata.glyphs
for k,c in pairs(chars) do
- local o = otfdata.glyphs[c.index]
+ local o = glyphs[c.index]
if o and o.lookups then
for lookup,ps in pairs(o.lookups) do
---~ if valid(kind,lookup) then -- can be optimized for #p = 1
-if somevalid[lookup] then -- can be optimized for #p = 1
+ if somevalid[lookup] then
for i=1,#ps do
local p = ps[i]
local t = p.type
@@ -1578,7 +1755,7 @@ if somevalid[lookup] then -- can be optimized for #p = 1
end
elseif t == 'alternate' then
local pa = p.specification if pa and pa.components then
- local pc = pa.components:match("(%S+)")
+ local pc = pa.components:match("([^ ]+)")
if pc then
local upc = unicodes[pc]
if upc and chars[upc] then
@@ -1592,11 +1769,14 @@ if somevalid[lookup] then -- can be optimized for #p = 1
end
elseif t == 'ligature' and not changed[k] then
local pl = p.specification
- if pl and pl.components then
- if trace then
- logs.report("define otf",string.format("%s: %s => %s (%s)",kind,pl.components,chars[k].name,k))
+ if pl then
+ local plc = pl.components
+ if plc then
+ if trace then
+ logs.report("define otf",string.format("%s: %s => %s (%s)",kind,plc,chars[k].name,k))
+ end
+ ligatures[#ligatures+1] = { plc, k }
end
- ligatures[#ligatures+1] = { pl.components, k }
end
end
end
@@ -1642,7 +1822,7 @@ fonts.otf.features.data.tex = {
--~ 0x201D 0x2019 0x2019
--~ 0x201E 0X002C 0x002C
-function fonts.initializers.base.otf.texligatures(tfm,value)
+function fonts.initializers.base.otf.tlig(tfm,value)
local otfdata = tfm.shared.otfdata
local unicodes = otfdata.luatex.unicodes
local ligatures = { }
@@ -1656,21 +1836,20 @@ function fonts.initializers.base.otf.texligatures(tfm,value)
ligatures[#ligatures+1] = { v[2], v[1] }
end
end
- fonts.otf.features.aux.resolve_ligatures(tfm,ligatures)
+ fonts.otf.features.aux.resolve_ligatures(tfm,ligatures,'tlig')
end
-function fonts.initializers.base.otf.texquotes(tfm,value)
+function fonts.initializers.base.otf.trep(tfm,value)
tfm.characters[0x0022] = table.fastcopy(tfm.characters[0x201D])
tfm.characters[0x0027] = table.fastcopy(tfm.characters[0x2019])
tfm.characters[0x0060] = table.fastcopy(tfm.characters[0x2018])
end
-fonts.initializers.base.otf.trep = fonts.initializers.base.otf.texquotes
-fonts.initializers.base.otf.tlig = fonts.initializers.base.otf.texligatures
-
-table.insert(fonts.triggers,"texquotes")
-table.insert(fonts.triggers,"texligatures")
table.insert(fonts.triggers,"tlig")
+table.insert(fonts.triggers,"trep")
+
+fonts.define.specify.synonyms["texquotes"] = "trep"
+fonts.define.specify.synonyms["texligatures"] = "tlig"
-- Here comes the real thing ... node processing! The next section prepares
-- things. The main features (unchained by rules) have their own caches,
@@ -1680,37 +1859,37 @@ do
fonts.otf.features.prepare = { }
- -- also share vars
-
- function fonts.otf.features.prepare.feature(tfmdata,kind,value) -- check BASE VS NODE
+ function fonts.otf.features.prepare.feature(tfmdata,kind,value)
if value then
- tfmdata.unique = tfmdata.unique or { }
- tfmdata.shared = tfmdata.shared or { }
+ local language, script = tfmdata.language or "dflt", tfmdata.script or "dflt"
local shared = tfmdata.shared
- shared.featuredata = shared.featuredata or { }
- shared.featuredata[kind] = shared.featuredata[kind] or { }
- shared.featurecache = shared.featurecache or { }
- shared.featurecache[kind] = false -- signal
local otfdata = shared.otfdata
- local lookuptable = fonts.otf.valid_subtable(otfdata,tfmdata.language,tfmdata.script,kind)
- shared.lookuptable = shared.lookuptable or { }
- shared.lookuptable[kind] = lookuptable
+ local lookuptable = fonts.otf.valid_subtable(otfdata,kind,script,language)
if lookuptable then
- shared.processes = shared.processes or { }
- shared.processes[kind] = shared.processes[kind] or { }
- local processes = shared.processes[kind]
- local types = otfdata.luatex.name_to_type
- local flags = otfdata.luatex.ignore_flags
- local preparers = fonts.otf.features.prepare
- local process = fonts.otf.features.process
- for noflookups, lookupname in ipairs(lookuptable) do
- local lookuptype = types[lookupname]
- local prepare = preparers[lookuptype]
- if prepare then
- local processdata = prepare(tfmdata,kind,lookupname)
- if processdata then
- local processflags = flags[lookupname] or {false,false,false}
- processes[#processes+1] = { process[lookuptype], lookupname, processdata, processflags }
+ local fullkind = kind .. script .. language
+ if not shared.lookuptable [fullkind] then
+ --~ print(tfmdata,file.basename(tfmdata.fullname or ""),kind,script,language,lookuptable,fullkind)
+ local processes = { }
+ -- featuredata and featurecache are indexed by lookup so we can share them
+ shared.featuredata [kind] = shared.featuredata [kind] or { }
+ shared.featurecache[kind] = shared.featurecache[kind] or false -- signal
+ shared.lookuptable [fullkind] = lookuptable
+ shared.processes [fullkind] = processes
+ local types = otfdata.luatex.name_to_type
+ local flags = otfdata.luatex.ignore_flags
+ local preparers = fonts.otf.features.prepare
+ local process = fonts.otf.features.process
+ local falsetable = { false, false, false }
+ for i=1,#lookuptable do
+ local lookupname = lookuptable[i]
+ local lookuptype = types[lookupname]
+ local prepare = preparers[lookuptype]
+ if prepare then
+ local processdata = prepare(tfmdata,kind,lookupname)
+ if processdata then
+ local processflags = flags[lookupname] or falsetable --- share false table
+ processes[#processes+1] = { process[lookuptype], lookupname, processdata, processflags }
+ end
end
end
end
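
Sketched in isolation (values are examples, not taken from the patch): because script and language can now come from a dynamic feature attribute instead of the font itself, prepared lookup tables and process lists are cached under the composite key kind .. script .. language.

    local shared = { lookuptable = { }, processes = { } }

    local function ensure_prepared(kind, script, language, prepare)
        local fullkind = kind .. script .. language        -- e.g. "ligalatndflt"
        if not shared.lookuptable[fullkind] then
            local lookups, processes = prepare(kind, script, language)
            shared.lookuptable[fullkind] = lookups
            shared.processes  [fullkind] = processes
        end
        return shared.processes[fullkind]
    end
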
@@ -1721,7 +1900,9 @@ do
-- helper: todo, we don't need to store non local ones for chains so we can pass the
-- validator as parameter
- function fonts.otf.features.collect_ligatures(tfmdata,kind,internal) -- ligs are spread all over the place
+ local pairs = pairs
+
+ function fonts.otf.features.collect_ligatures(tfmdata,kind) -- ligs are spread all over the place
local otfdata = tfmdata.shared.otfdata
local unicodes = tfmdata.shared.otfdata.luatex.unicodes -- actually the char index is ok too
local trace = fonts.otf.trace_features
@@ -1739,7 +1920,7 @@ do
ligatures[lookup] = t
end
local first = true
- for s in p.specification.components:gmatch("(%S+)") do
+ for s in p.specification.components:gmatch("[^ ]+") do
local u = unicodes[s]
if first then
if not t[u] then
@@ -1748,44 +1929,27 @@ do
t = t[u]
first = false
else
- if not t[1][u] then
- t[1][u] = { { } }
+ local t1 = t[1]
+ if not t1[u] then
+ t1[u] = { { } }
end
- t = t[1][u]
+ t = t1[u]
end
end
t[2] = o.unicodeenc
end
end
end
- if internal then
- local always = otfdata.luatex.always_valid
- for _,o in pairs(otfdata.glyphs) do
- if o.lookups then
- for lookup, ps in pairs(o.lookups) do
- if always[lookup] then
- collect(lookup,o,ps)
- end
- end
- end
- end
- else -- check if this valid is still ok
---~ local valid = fonts.otf.valid_feature(otfdata,tfmdata.language,tfmdata.script)
- local forced, always, okay = fonts.otf.valid_feature(otfdata,tfmdata.language,tfmdata.script,kind)
- for _,o in pairs(otfdata.glyphs) do
- if o.lookups then
---~ for lookup, ps in pairs(o.lookups) do
---~ if valid(kind,lookup) then
---~ collect(lookup,o,ps)
---~ end
---~ end
- if forced then
- for lookup, ps in pairs(o.lookups) do collect(lookup,o,ps) end
- elseif okay then
- for lookup, ps in pairs(o.lookups) do if always[lookup] or okay[lookup] then collect(lookup,o,ps) end end
- else
- for lookup, ps in pairs(o.lookups) do if always[lookup] then collect(lookup,o,ps) end end
- end
+ local forced, always, okay = fonts.otf.valid_feature(otfdata,kind,tfmdata.script,tfmdata.language)
+ for _,o in pairs(otfdata.glyphs) do
+ local lookups = o.lookups
+ if lookups then
+ if forced then
+ for lookup, ps in pairs(lookups) do collect(lookup,o,ps) end
+ elseif okay then
+ for lookup, ps in pairs(lookups) do if always[lookup] or okay[lookup] then collect(lookup,o,ps) end end
+ else
+ for lookup, ps in pairs(lookups) do if always[lookup] then collect(lookup,o,ps) end end
end
end
end
@@ -1855,7 +2019,8 @@ do
if p.specification and p.type == 'multiple' then
local old, new = o.unicodeenc, { }
substitutions[old] = new
- for pc in p.specification.components:gmatch("(%S+)") do
+ -- for pc in p.specification.components:gmatch("([^ ]+)") do
+ for pc in p.specification.components:gmatch("[^ ]+") do
new[#new+1] = unicodes[pc]
end
if trace then
@@ -1891,7 +2056,8 @@ do
if p.specification and p.type == 'alternate' then
local old = o.unicodeenc
local t = { }
- for pc in p.specification.components:gmatch("(%S+)") do
+ -- for pc in p.specification.components:gmatch("([^ ]+)") do
+ for pc in p.specification.components:gmatch("[^ ]+") do
t[#t+1] = unicodes[pc]
end
substitutions[old] = t
@@ -1921,7 +2087,7 @@ do
end
function fonts.otf.features.prepare.contextchain(tfmdata,kind,lookupname)
- local featuredata = tfmdata.shared.featuredata[kind]
+ local featuredata = tfmdata.shared.featuredata[kind]
local contexts = featuredata[lookupname]
if not contexts then
featuredata[lookupname] = { }
@@ -1932,16 +2098,18 @@ do
local flags = otfdata.luatex.ignore_flags
local types = otfdata.luatex.name_to_type
otfdata.luatex.covers = otfdata.luatex.covers or { }
- local cache = otfdata.luatex.covers
local characters = tfmdata.characters
+ local cache = otfdata.luatex.covers
local function uncover(covers)
+ -- lpeg hardly faster (.005 sec on mk)
if covers then
local result = { }
- for n, c in ipairs(covers) do
+ for n=1,#covers do
+ local c = covers[n]
local cc = cache[c]
if not cc then
local t = { }
- for s in c:gmatch("(%S+)") do
+ for s in c:gmatch("[^ ]+") do
t[unicodes[s]] = true
end
cache[c] = t
@@ -1959,7 +2127,9 @@ do
if not lookupdata then
logs.error("otf process", string.format("missing lookupdata table %s",lookupname))
elseif lookupdata.rules then
- for nofrules, rule in ipairs(lookupdata.rules) do
+ local rules = lookupdata.rules
+ for nofrules=1,#rules do
+ local rule = rules[nofrules]
local coverage = rule.coverage
if coverage and coverage.current then
local current = uncover(coverage.current)
@@ -2016,9 +2186,11 @@ do
local validanchors = { }
local glyphs = otfdata.glyphs
if otfdata.anchor_classes then
- for k,v in ipairs(otfdata.anchor_classes) do
- if v.lookup == lookupname then
- validanchors[v.name] = true
+ local classes = otfdata.anchor_classes
+ for k=1,#classes do
+ local class = classes[k]
+ if class.lookup == lookupname then
+ validanchors[class.name] = true
end
end
end
@@ -2067,6 +2239,7 @@ do
local featuredata = tfmdata.shared.featuredata[kind]
local kerns = featuredata[lookupname]
if not kerns then
+ local trace = fonts.otf.trace_features
featuredata[lookupname] = { }
kerns = featuredata[lookupname]
local otfdata = tfmdata.shared.otfdata
@@ -2087,25 +2260,27 @@ do
else
kerns[one] = { two = off }
end
- if fonts.otf.trace_features then
+ if trace then
logs.report("define otf",string.format("feature %s kern pair %s - %s",kind,one,two))
end
end
end
elseif o.kerns then
local one = o.unicodeenc
- for _, l in ipairs(o.kerns) do
- if l.lookup == lookupname then
- local char = l.char
+ local okerns = o.kerns
+ for ok=1,#okerns do
+ local k = okerns[ok]
+ if k.lookup == lookupname then
+ local char = k.char
if char then
local two = unicodes[char]
local krn = kerns[one]
if krn then
- krn[two] = l.off
+ krn[two] = k.off
else
- kerns[one] = { two = l.off }
+ kerns[one] = { two = k.off }
end
- if fonts.otf.trace_features then
+ if trace then
logs.report("define otf",string.format("feature %s kern pair %s - %s",kind,one,two))
end
end
@@ -2128,7 +2303,7 @@ do
else
kerns[one] = { two = specification.offsets }
end
- if fonts.otf.trace_features then
+ if trace then
logs.report("define otf",string.format("feature %s kern pair %s - %s",kind,one,two))
end
end
@@ -2232,11 +2407,11 @@ end
do
+ -- todo: use nodes helpers
+
local glyph = node.id('glyph')
local glue = node.id('glue')
- local kern_node = node.new("kern")
- local glue_node = node.new("glue")
- local glyph_node = node.new("glyph")
+ local disc = node.id('disc')
local fontdata = fonts.tfm.id
local has_attribute = node.has_attribute
@@ -2245,10 +2420,11 @@ do
local marknumber = attributes.numbers['mark'] or 200
local format = string.format
local report = logs.report
+ local scale = tex.scale
fonts.otf.features.process = { }
- -- we share aome vars here, after all, we have no nested lookups and
+ -- we share some vars here, after all, we have no nested lookups and
-- less code
local tfmdata = false
@@ -2258,22 +2434,38 @@ do
local glyphs = false
local currentfont = false
- function fonts.otf.features.process.feature(head,font,kind,attribute)
+ -- we cheat a bit and assume that a font,attr combination are kind of ranged
+
+ local context_setups = fonts.define.specify.context_setups
+ local context_numbers = fonts.define.specify.context_numbers
+
+ function fonts.otf.features.process.feature(head,font,attr,kind,attribute)
tfmdata = fontdata[font]
- otfdata = tfmdata.shared.otfdata
+ local shared = tfmdata.shared
+ otfdata = shared.otfdata
characters = tfmdata.characters
marks = otfdata.luatex.marks
glyphs = otfdata.glyphs
currentfont = font
- local lookuptable = tfmdata.shared.lookuptable[kind]
+ local script, language
+ if attr and attr > 0 then
+ local features = context_setups[context_numbers[attr]]
+ language, script = features.language or "dflt", features.script or "dflt"
+ else
+ language, script = tfmdata.language or "dflt", tfmdata.script or "dflt"
+ end
+ local fullkind = kind .. script .. language
+ local lookuptable = shared.lookuptable[fullkind]
if lookuptable then
local types = otfdata.luatex.name_to_type
local start, done, ok = head, false, false
- local processes = tfmdata.shared.processes[kind]
+ local processes = shared.processes[fullkind]
if #processes == 1 then
local p = processes[1]
- while start do
- if start.id == glyph and start.font == font and (not attribute or has_attribute(start,state,attribute)) then
+            while start do -- possibly split this loop
+ if start.id == glyph and start.subtype<256 and start.font == font and
+ (not attr or has_attribute(start,0,attr)) and -- dynamic feature
+ (not attribute or has_attribute(start,state,attribute)) then
-- we can make the p vars also global to this closure
local pp = p[3] -- all lookups
local pc = pp[start.char]
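
The per-glyph guard used in both loops above, written out as a predicate (a sketch; has_attribute and node.id come from LuaTeX's node library, and attribute register 0 is the one \setfontfeature assigns):

    local has_attribute = node.has_attribute
    local glyph = node.id('glyph')

    local function wanted(n, font, attr, state, attribute)
        return n.id == glyph and n.subtype < 256 and n.font == font
           and (not attr or has_attribute(n, 0, attr))                  -- dynamic feature attribute
           and (not attribute or has_attribute(n, state, attribute))    -- analysis state
    end
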
@@ -2290,7 +2482,9 @@ do
end
else
while start do
- if start.id == glyph and start.font == font and (not attribute or has_attribute(start,state,attribute)) then
+ if start.id == glyph and start.subtype<256 and start.font == font and
+ (not attr or has_attribute(start,0,attr)) and -- dynamic feature
+ (not attribute or has_attribute(start,state,attribute)) then
for i=1,#processes do local p = processes[i]
local pp = p[3]
local pc = pp[start.char]
@@ -2316,59 +2510,92 @@ do
end
end
- -- todo: components / else subtype 0 / maybe we should be able to force this
+ -- we can assume that languages that use marks are not hyphenated
+ -- we can also assume that at most one discretionary is present
- local function toligature(start,stop,char,markflag)
+ local function toligature(start,stop,char,markflag,discfound) -- brr head
if start ~= stop then
- local deletemarks = markflag ~= "mark"
- start.components = node.copy_list(start,stop)
- node.slide(start.components)
- -- todo: components
- start.subtype = 1
- start.char = char
- local marknum = 1
- local next = start.next
- while true do
- if marks[next.char] then
- if not deletemarks then
- set_attribute(next,marknumber,marknum)
- end
- else
- marknum = marknum + 1
- end
- if next == stop then
- break
- else
- next = next.next
- end
- end
- next = stop.next
- while next do
- if next.id == glyph and next.font == currentfont and marks[next.char] then
- set_attribute(next,marknumber,marknum)
- next = next.next
- else
- break
+ if discfound then
+ local lignode = node.copy(start)
+ lignode.font = start.font
+ lignode.char = char
+ lignode.subtype = 2
+ start = node.do_ligature_n(start, stop, lignode)
+ if start.id == disc then
+ local prev = start.prev
+ start = start.next
end
- end
- local next = start.next
- while true do
- if next == stop or deletemarks or marks[next.char] then
- local crap = next
- next.prev.next = next.next
- if next.next then
- next.next.prev = next.prev
+ else
+ local deletemarks = markflag ~= "mark"
+ start.components = node.copy_list(start,stop)
+ node.slide(start.components)
+ -- todo: components
+ start.subtype = 2
+ start.char = char
+ local marknum = 1
+ local next = start.next
+ while true do
+ if marks[next.char] then
+ if not deletemarks then
+ set_attribute(next,marknumber,marknum)
+ end
+ else
+ marknum = marknum + 1
end
if next == stop then
- stop = crap.prev
- node.free(crap)
break
else
next = next.next
- node.free(crap)
end
- else
- next = next.next
+ end
+ next = stop.next
+ while next do
+ if next.id == glyph and next.font == currentfont and marks[next.char] then
+ set_attribute(next,marknumber,marknum)
+ next = next.next
+ else
+ break
+ end
+ end
+ local next = start.next
+--~ while true do
+--~ if next == stop or deletemarks or marks[next.char] then
+--~ local crap = next
+--~ next.prev.next = next.next
+--~ if next.next then
+--~ next.next.prev = next.prev
+--~ end
+--~ if next == stop then
+--~ stop = crap.prev
+--~ node.free(crap)
+--~ break
+--~ else
+--~ next = next.next
+--~ node.free(crap)
+--~ end
+--~ else
+--~ next = next.next
+--~ end
+--~ end
+ while true do
+ if next == stop or deletemarks or marks[next.char] then
+ local crap = next
+ local np, nn = next.prev, next.next
+ np.next = nn
+ if nn then
+ nn.prev = np
+ end
+ if next == stop then
+ stop = crap.prev
+ node.free(crap)
+ break
+ else
+ next = nn
+ node.free(crap)
+ end
+ else
+ next = nn
+ end
end
end
end
@@ -2405,11 +2632,12 @@ do
if #multiples > 1 then
for k=2,#multiples do
local n = node.copy(start)
+ local sn = start.next
n.char = multiples[k]
- n.next = start.next
+ n.next = sn
n.prev = start
- if start.next then
- start.next.prev = n
+ if sn then
+ sn.prev = n
end
start.next = n
start = n
@@ -2425,23 +2653,35 @@ do
end
function fonts.otf.features.process.gsub_ligature(start,kind,lookupname,ligatures,alldata,flags)
- local s, stop = start.next, nil
- while s and s.id == glyph and s.subtype == 0 and s.font == currentfont do
- if marks[s.char] then
- s = s.next
- else
- local lg = ligatures[1][s.char]
- if not lg then
- break
+ local s, stop, discfound = start.next, nil, false
+ while s do
+ local id = s.id
+ if id == glyph and s.subtype<256 then
+ if s.font == currentfont then
+ if marks[s.char] then
+ s = s.next
+ else
+ local lg = ligatures[1][s.char]
+ if not lg then
+ break
+ else
+ stop = s
+ ligatures = lg
+ s = s.next
+ end
+ end
else
- stop = s
- ligatures = lg
- s = s.next
+ break
end
+ elseif id == disc then
+ discfound = true
+ s = s.next
+ else
+ break
end
end
if stop and ligatures[2] then
- start = toligature(start,stop,ligatures[2],flags[1])
+ start = toligature(start,stop,ligatures[2],flags[1],discfound)
if fonts.otf.trace_ligatures then
report("process otf",format("%s: inserting ligature %s (%s)",kind,start.char,utf.char(start.char)))
end
@@ -2456,9 +2696,10 @@ do
local bases = baseanchors['basechar']
if bases then
local component = start.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
local trace = fonts.otf.trace_anchors
local last, done = start, false
+ local factor = tfmdata.factor
while true do
local markanchors = anchors[component.char]
if markanchors then
@@ -2467,8 +2708,8 @@ do
for anchor,data in pairs(marks) do
local ba = bases[anchor]
if ba then
- local dx = tex.scale(ba.x-data.x, tfmdata.factor)
- local dy = tex.scale(ba.y-data.y, tfmdata.factor)
+ local dx = scale(ba.x-data.x, factor)
+ local dy = scale(ba.y-data.y, factor)
component.xoffset = start.xoffset - dx
component.yoffset = start.yoffset + dy
if trace then
@@ -2485,7 +2726,7 @@ do
--~ if component and component.id == kern then
--~ component = component.next
--~ end
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
-- ok
else
break
@@ -2501,9 +2742,10 @@ do
local bases = baseanchors['baselig']
if bases then
local component = start.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
local trace = fonts.otf.trace_anchors
local last, done = start, false
+ local factor = tfmdata.factor
while true do
local markanchors = anchors[component.char]
if markanchors then
@@ -2515,8 +2757,8 @@ do
local n = has_attribute(component,marknumber)
local ban = ba[n]
if ban then
- local dx = tex.scale(ban.x-data.x, tfmdata.factor)
- local dy = tex.scale(ban.y-data.y, tfmdata.factor)
+ local dx = scale(ban.x-data.x, factor)
+ local dy = scale(ban.y-data.y, factor)
component.xoffset = start.xoffset - dx
component.yoffset = start.yoffset + dy
if trace then
@@ -2534,7 +2776,7 @@ do
--~ if component and component.id == kern then
--~ component = component.next
--~ end
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
-- ok
else
break
@@ -2551,10 +2793,11 @@ do
local bases = baseanchors['basemark']
if bases then
local component = start.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
local baseattr = has_attribute(start,marknumber) or 1
local trace = fonts.otf.trace_anchors
local last, done = start, false
+ local factor = tfmdata.factor
while true do
local markattr = has_attribute(component,marknumber) or 1
if baseattr == markattr then
@@ -2565,12 +2808,12 @@ do
for anchor,data in pairs(marks) do
local ba = bases[anchor]
if ba then
- local dx = tex.scale(ba.x-data.x, tfmdata.factor)
- local dy = tex.scale(ba.y-data.y, tfmdata.factor)
+ local dx = scale(ba.x-data.x, factor)
+ local dy = scale(ba.y-data.y, factor)
component.xoffset = start.xoffset - dx
component.yoffset = start.yoffset + dy
if trace then
- report("process otf",format("%s:%s:%s anchoring mark %s to basemark %s => (%s,%s) => (%s,%s)",kind,anchor,n,start.char,component.char,dx,dy,component.xoffset,component.yoffset))
+ report("process otf",format("%s:%s:%s anchoring mark %s to basemark %s => (%s,%s) => (%s,%s)",kind,anchor,markattr,start.char,component.char,dx,dy,component.xoffset,component.yoffset))
end
done = true
break
@@ -2583,7 +2826,7 @@ do
--~ if component and component.id == kern then
--~ component = component.next
--~ end
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
-- ok
else
break
@@ -2603,11 +2846,12 @@ do
local next, done, x, y, total, t, first = start.next, false, 0, 0, 0, { }, nil
local function finish()
local i = 0
+ local factor = tfmdata.factor
while first do
if characters[first.char].class == 'mark' then
first = first.next
else
- first.yoffset = tex.scale(total, tfmdata.factor)
+ first.yoffset = scale(total, factor)
if first == next then
break
else
@@ -2620,7 +2864,7 @@ do
x, y, total, t, first = 0, 0, 0, { }, nil
end
while next do
- if next.id == glyph and next.font == currentfont then
+ if next.id == glyph and next.subtype<256 and next.font == currentfont then
local nextchar = next.char
if marks[nextchar] then
next = next.next
@@ -2664,7 +2908,9 @@ do
function fonts.otf.features.process.gpos_pair(start,kind,lookupname,basekerns,kerns)
local next, prev, done = start.next, start, false
-- to be optimized
- while next and next.id == glyph and next.font == currentfont do
+ local trace = fonts.otf.trace_kerns
+ local factor = tfmdata.factor
+ while next and next.id == glyph and next.subtype<256 and next.font == currentfont do
if characters[next.char].class == 'mark' then
prev = next
next = next.next
@@ -2675,8 +2921,7 @@ do
elseif type(krn) == "table" then
local a, b = krn[1], krn[2]
if a and a.x then
- local k = node.copy(kern_node)
- k.kern = tex.scale(a.x,fontdata[currentfont].factor) -- tfmdata.factor
+ local k = nodes.kern(scale(a.x,factor))
if b and b.x then
report("otf process","we need to do something with the second kern xoff " .. b.x)
end
@@ -2684,14 +2929,13 @@ do
k.prev = prev
prev.next = k
next.prev = k
- if fonts.otf.trace_kerns then
+ if trace then
-- todo
end
end
else
- -- todo, just start, next = node.insert_before(head,next,nodes.kern(tex.scale(kern,fontdata[currentfont].factor)))
- local k = node.copy(kern_node)
- k.kern = tex.scale(krn,fontdata[currentfont].factor) -- tfmdata.factor
+ -- todo, just start, next = node.insert_before(head,next,nodes.kern(scale(kern,factor)))
+ local k = nodes.kern(scale(krn,factor))
k.next = next
k.prev = prev
prev.next = k
@@ -2759,7 +3003,8 @@ do
local lv = looks[lookups[l]]
if lv then
replacement = { }
- for c in lv[1].specification.components:gmatch("(%S+)") do
+ -- for c in lv[1].specification.components:gmatch("([^ ]+)") do
+ for c in lv[1].specification.components:gmatch("[^ ]+") do
replacement[#replacement+1] = unicodes[c]
end
cacheslot[char] = replacement
@@ -2782,11 +3027,12 @@ do
if #replacement > 1 then
for k=2,#replacement do
local n = node.copy(start)
+ local sn = start.next
n.char = replacement[k]
- n.next = start.next
+ n.next = sn
n.prev = start
- if start.next then
- start.next.prev = n
+ if sn then
+ sn.prev = n
end
start.next = n
start = n
@@ -2809,7 +3055,8 @@ do
local lv = looks[lookups[l]]
if lv then
replacement = { }
- for c in lv[1].specification.components:gmatch("(%S+)") do
+ -- for c in lv[1].specification.components:gmatch("([^ ]+)") do
+ for c in lv[1].specification.components:gmatch("[^ ]+") do
replacement[#replacement+1] = unicodes[c]
end
cacheslot[char] = replacement
@@ -2836,19 +3083,22 @@ do
if lookups then
local featurecache = fontdata[currentfont].shared.featurecache
if not featurecache[kind] then
- featurecache[kind] = fonts.otf.features.collect_ligatures(tfmdata,kind)
- -- to be tested: only collect internal
- -- featurecache[kind] = fonts.otf.features.collect_ligatures(tfmdata,kind,true) --
+ featurecache[kind] = fonts.otf.features.collect_ligatures(tfmdata,kind) -- double cached ?
end
local lookups = otfdata.luatex.internals[lookups[1]].lookups
local ligaturecache = featurecache[kind]
+ local trace = fonts.otf.trace_ligatures
for i=1,#lookups do
local ligatures = ligaturecache[lookups[i]]
if ligatures and ligatures[start.char] then
ligatures = ligatures[start.char]
- local s = start.next
+ local s, discfound = start.next, false
while s do
- if characters[s.char].class == 'mark' then
+ local id = s.id
+ if id == disc then
+ s = s.next
+ discfound = true
+ elseif characters[s.char].class == 'mark' then
s = s.next
else
local lg = ligatures[1][s.char]
@@ -2865,10 +3115,10 @@ do
end
end
if ligatures[2] then
- if fonts.otf.trace_ligatures then
+ if trace then
report("otf chain",format("%s: replacing character %s by ligature",kind,start.char))
end
- return toligature(start,stop,ligatures[2],flags[1])
+ return toligature(start,stop,ligatures[2],flags[1],discfound)
end
break
end
@@ -2879,7 +3129,7 @@ do
function chainprocs.gpos_mark2base(start,stop,kind,lookupname,sequence,lookups)
local component = start.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
local char = start.char
local anchortag = sequence[1][char]
if anchortag == true then
@@ -2899,6 +3149,7 @@ do
local trace = fonts.otf.trace_anchors
local last, done = start, false
local baseanchors = glyph.anchors['basechar'][anchortag]
+ local factor = tfmdata.factor
while true do
local nextchar = component.char
local charnext = characters[nextchar]
@@ -2907,8 +3158,8 @@ do
for anchor,data in pairs(markanchors) do
local ba = baseanchors[anchor]
if ba then
- local dx = tex.scale(ba.x-data.x, tfmdata.factor)
- local dy = tex.scale(ba.y-data.y, tfmdata.factor)
+ local dx = scale(ba.x-data.x, factor)
+ local dy = scale(ba.y-data.y, factor)
component.xoffset = start.xoffset - dx
component.yoffset = start.yoffset + dy
if trace then
@@ -2921,7 +3172,7 @@ do
end
last = component
component = component.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
-- ok
else
break
@@ -2936,7 +3187,7 @@ do
function chainprocs.gpos_mark2ligature(start,stop,kind,lookupname,sequence,lookups)
local component = start.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
local char = start.char
local anchortag = sequence[1][char]
if anchortag == true then
@@ -2957,6 +3208,7 @@ do
local done = false
local last = start
local baseanchors = glyph.anchors['baselig'][anchortag]
+ local factor = tfmdata.factor
while true do
local nextchar = component.char
local charnext = characters[nextchar]
@@ -2968,8 +3220,8 @@ do
local n = has_attribute(component,marknumber)
local ban = ba[n]
if ban then
- local dx = tex.scale(ban.x-data.x, tfmdata.factor)
- local dy = tex.scale(ban.y-data.y, tfmdata.factor)
+ local dx = scale(ban.x-data.x, factor)
+ local dy = scale(ban.y-data.y, factor)
component.xoffset = start.xoffset - dx
component.yoffset = start.yoffset + dy
if trace then
@@ -2983,7 +3235,7 @@ do
end
last = component
component = component.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
-- ok
else
break
@@ -2998,7 +3250,7 @@ do
function chainprocs.gpos_mark2mark(start,stop,kind,lookupname,sequence,lookups)
local component = start.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
local char = start.char
local anchortag = sequence[1][char]
if anchortag == true then
@@ -3020,6 +3272,7 @@ do
local trace = fonts.otf.trace_anchors
local last, done = false
local baseanchors = glyph.anchors['basemark'][anchortag]
+ local factor = tfmdata.factor
while true do
local nextchar = component.char
local charnext = characters[nextchar]
@@ -3028,8 +3281,8 @@ do
for anchor,data in pairs(markanchors) do
local ba = baseanchors[anchor]
if ba then
- local dx = tex.scale(ba.x-data.x, tfmdata.factor)
- local dy = tex.scale(ba.y-data.y, tfmdata.factor)
+ local dx = scale(ba.x-data.x, factor)
+ local dy = scale(ba.y-data.y, factor)
component.xoffset = start.xoffset - dx
component.yoffset = start.yoffset + dy
if trace then
@@ -3042,7 +3295,7 @@ do
end
last = component
component = component.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
markattr = has_attribute(component,marknumber)
if baseattr ~= markattr then
break
@@ -3089,15 +3342,22 @@ do
match = sequence[1][start.char]
else -- n = #sequence -> faster
for n=1,#sequence do
- if stop and stop.id == glyph and stop.font == currentfont then
- local char = stop.char
- local class = characters[char].class
- if class == skipmark or class == skipligature or class == skipbase then
- -- skip 'm
- elseif sequence[n][char] then
- if n < #sequence then
- stop = stop.next
+ if stop then
+ local id = stop.id
+ if id == glyph and stop.subtype<256 and stop.font == currentfont then
+ local char = stop.char
+ local class = characters[char].class
+ if class == skipmark or class == skipligature or class == skipbase then
+ -- skip 'm
+ elseif sequence[n][char] then
+ if n < #sequence then
+ stop = stop.next
+ end
+ else
+ match = false break
end
+ elseif id == disc then -- what to do with kerns?
+ stop = stop.next
else
match = false break
end
@@ -3111,11 +3371,12 @@ do
local prev = start.prev
if prev then
if #before == 1 then
- match = prev.id == glyph and prev.font == currentfont and before[1][prev.char]
+ match = prev.id == glyph and prev.subtype<256 and prev.font == currentfont and before[1][prev.char]
else
for n=#before,1 do
if prev then
- if prev.id == glyph and prev.font == currentfont then -- normal char
+ local id = prev.id
+ if id == glyph and prev.subtype<256 and prev.font == currentfont then -- normal char
local char = prev.char
local class = characters[char].class
if class == skipmark or class == skipligature or class == skipbase then
@@ -3123,6 +3384,8 @@ do
elseif not before[n][char] then
match = false break
end
+ elseif id == disc then
+ -- skip 'm
elseif not before[n][32] then
match = false break
end
@@ -3146,11 +3409,12 @@ do
local next = stop.next
if next then
if #after == 1 then
- match = next.id == glyph and next.font == currentfont and after[1][next.char]
+ match = next.id == glyph and next.subtype<256 and next.font == currentfont and after[1][next.char]
else
for n=1,#after do
if next then
- if next.id == glyph and next.font == currentfont then -- normal char
+ local id = next.id
+ if id == glyph and next.subtype<256 and next.font == currentfont then -- normal char
local char = next.char
local class = characters[char].class
if class == skipmark or class == skipligature or class == skipbase then
@@ -3158,6 +3422,8 @@ do
elseif not after[n][char] then
match = false break
end
+ elseif id == disc then
+ -- skip 'm
elseif not after[n][32] then -- brrr
match = false break
end
@@ -3180,6 +3446,7 @@ do
if match then
local trace = fonts.otf.trace_contexts
if trace then
+ local char = start.char
report("otf chain",format("%s: rule %s of %s matches %s times at char %s (%s) lookuptype %s",kind,rule,lookupname,#sequence,char,utf.char(char),lookuptype))
end
if lookups then
@@ -3214,11 +3481,12 @@ do
local prev = start.prev
if prev then
if #after == 1 then
- match = prev.id == glyph and prev.font == currentfont and after[1][prev.char]
+ match = prev.id == glyph and prev.subtype<256 and prev.font == currentfont and after[1][prev.char]
else
for n=1,#after do
if prev then
- if prev.id == glyph and prev.font == currentfont then -- normal char
+ local id = prev.id
+ if id == glyph and prev.subtype<256 and prev.font == currentfont then -- normal char
local char = prev.char
local class = characters[char].class
if class == skipmark or class == skipligature or class == skipbase then
@@ -3226,6 +3494,8 @@ do
elseif not after[n][char] then
match = false break
end
+ elseif id == disc then
+ -- skip 'm
elseif not after[n][32] then
match = false break
end
@@ -3249,11 +3519,12 @@ do
local next = stop.next
if next then
if #after == 1 then
- match = next.id == glyph and next.font == currentfont and before[1][next.char]
+ match = next.id == glyph and next.subtype<256 and next.font == currentfont and before[1][next.char]
else
for n=#before,1 do
if next then
- if next.id == glyph and next.font == currentfont then -- normal char
+ local id = next.id
+ if id == glyph and next.subtype<256 and next.font == currentfont then -- normal char
local char = next.char
local class = characters[char].class
if class == skipmark or class == skipligature or class == skipbase then
@@ -3261,6 +3532,8 @@ do
elseif not before[n][char] then
match = false break
end
+ elseif id == disc then
+ -- skip 'm
elseif not before[n][32] then -- brrr
match = false break
end
@@ -3315,113 +3588,108 @@ do
local process = fonts.otf.features.process.feature
- function fonts.methods.node.otf.aalt(head,font) return process(head,font,'aalt') end
- function fonts.methods.node.otf.afrc(head,font) return process(head,font,'afrc') end
- function fonts.methods.node.otf.akhn(head,font) return process(head,font,'akhn') end
- function fonts.methods.node.otf.c2pc(head,font) return process(head,font,'c2pc') end
- function fonts.methods.node.otf.c2sc(head,font) return process(head,font,'c2sc') end
- function fonts.methods.node.otf.calt(head,font) return process(head,font,'calt') end
- function fonts.methods.node.otf.case(head,font) return process(head,font,'case') end
- function fonts.methods.node.otf.ccmp(head,font) return process(head,font,'ccmp') end
- function fonts.methods.node.otf.clig(head,font) return process(head,font,'clig') end
- function fonts.methods.node.otf.cpsp(head,font) return process(head,font,'cpsp') end
- function fonts.methods.node.otf.cswh(head,font) return process(head,font,'cswh') end
- function fonts.methods.node.otf.curs(head,font) return process(head,font,'curs') end
- function fonts.methods.node.otf.dlig(head,font) return process(head,font,'dlig') end
- function fonts.methods.node.otf.dnom(head,font) return process(head,font,'dnom') end
- function fonts.methods.node.otf.expt(head,font) return process(head,font,'expt') end
- function fonts.methods.node.otf.fin2(head,font) return process(head,font,'fin2') end
- function fonts.methods.node.otf.fin3(head,font) return process(head,font,'fin3') end
- function fonts.methods.node.otf.fina(head,font) return process(head,font,'fina',3) end
- function fonts.methods.node.otf.frac(head,font) return process(head,font,'frac') end
- function fonts.methods.node.otf.fwid(head,font) return process(head,font,'fwid') end
- function fonts.methods.node.otf.haln(head,font) return process(head,font,'haln') end
- function fonts.methods.node.otf.hist(head,font) return process(head,font,'hist') end
- function fonts.methods.node.otf.hkna(head,font) return process(head,font,'hkna') end
- function fonts.methods.node.otf.hlig(head,font) return process(head,font,'hlig') end
- function fonts.methods.node.otf.hngl(head,font) return process(head,font,'hngl') end
- function fonts.methods.node.otf.hwid(head,font) return process(head,font,'hwid') end
- function fonts.methods.node.otf.init(head,font) return process(head,font,'init',1) end
- function fonts.methods.node.otf.isol(head,font) return process(head,font,'isol',4) end
- function fonts.methods.node.otf.ital(head,font) return process(head,font,'ital') end
- function fonts.methods.node.otf.jp78(head,font) return process(head,font,'jp78') end
- function fonts.methods.node.otf.jp83(head,font) return process(head,font,'jp83') end
- function fonts.methods.node.otf.jp90(head,font) return process(head,font,'jp90') end
- function fonts.methods.node.otf.kern(head,font) return process(head,font,'kern') end
- function fonts.methods.node.otf.liga(head,font) return process(head,font,'liga') end
- function fonts.methods.node.otf.lnum(head,font) return process(head,font,'lnum') end
- function fonts.methods.node.otf.locl(head,font) return process(head,font,'locl') end
- function fonts.methods.node.otf.mark(head,font) return process(head,font,'mark') end
- function fonts.methods.node.otf.med2(head,font) return process(head,font,'med2') end
- function fonts.methods.node.otf.medi(head,font) return process(head,font,'medi',2) end
- function fonts.methods.node.otf.mgrk(head,font) return process(head,font,'mgrk') end
- function fonts.methods.node.otf.mkmk(head,font) return process(head,font,'mkmk') end
- function fonts.methods.node.otf.nalt(head,font) return process(head,font,'nalt') end
- function fonts.methods.node.otf.nlck(head,font) return process(head,font,'nlck') end
- function fonts.methods.node.otf.nukt(head,font) return process(head,font,'nukt') end
- function fonts.methods.node.otf.numr(head,font) return process(head,font,'numr') end
- function fonts.methods.node.otf.onum(head,font) return process(head,font,'onum') end
- function fonts.methods.node.otf.ordn(head,font) return process(head,font,'ordn') end
- function fonts.methods.node.otf.ornm(head,font) return process(head,font,'ornm') end
- function fonts.methods.node.otf.pnum(head,font) return process(head,font,'pnum') end
- function fonts.methods.node.otf.pref(head,font) return process(head,font,'pref') end
- function fonts.methods.node.otf.pres(head,font) return process(head,font,'pres') end
- function fonts.methods.node.otf.pstf(head,font) return process(head,font,'pstf') end
- function fonts.methods.node.otf.rlig(head,font) return process(head,font,'rlig') end
- function fonts.methods.node.otf.rphf(head,font) return process(head,font,'rphf') end
- function fonts.methods.node.otf.salt(head,font) return process(head,font,'calt') end
- function fonts.methods.node.otf.sinf(head,font) return process(head,font,'sinf') end
- function fonts.methods.node.otf.smcp(head,font) return process(head,font,'smcp') end
- function fonts.methods.node.otf.smpl(head,font) return process(head,font,'smpl') end
- function fonts.methods.node.otf.ss01(head,font) return process(head,font,'ss01') end
- function fonts.methods.node.otf.ss02(head,font) return process(head,font,'ss02') end
- function fonts.methods.node.otf.ss03(head,font) return process(head,font,'ss03') end
- function fonts.methods.node.otf.ss04(head,font) return process(head,font,'ss04') end
- function fonts.methods.node.otf.ss05(head,font) return process(head,font,'ss05') end
- function fonts.methods.node.otf.ss06(head,font) return process(head,font,'ss06') end
- function fonts.methods.node.otf.ss07(head,font) return process(head,font,'ss07') end
- function fonts.methods.node.otf.ss08(head,font) return process(head,font,'ss08') end
- function fonts.methods.node.otf.ss09(head,font) return process(head,font,'ss09') end
- function fonts.methods.node.otf.subs(head,font) return process(head,font,'subs') end
- function fonts.methods.node.otf.sups(head,font) return process(head,font,'sups') end
- function fonts.methods.node.otf.swsh(head,font) return process(head,font,'swsh') end
- function fonts.methods.node.otf.titl(head,font) return process(head,font,'titl') end
- function fonts.methods.node.otf.tnam(head,font) return process(head,font,'tnam') end
- function fonts.methods.node.otf.tnum(head,font) return process(head,font,'tnum') end
- function fonts.methods.node.otf.trad(head,font) return process(head,font,'trad') end
- function fonts.methods.node.otf.unic(head,font) return process(head,font,'unic') end
- function fonts.methods.node.otf.zero(head,font) return process(head,font,'zero') end
+ function fonts.methods.node.otf.aalt(head,font,attr) return process(head,font,attr,'aalt') end
+ function fonts.methods.node.otf.afrc(head,font,attr) return process(head,font,attr,'afrc') end
+ function fonts.methods.node.otf.akhn(head,font,attr) return process(head,font,attr,'akhn') end
+ function fonts.methods.node.otf.c2pc(head,font,attr) return process(head,font,attr,'c2pc') end
+ function fonts.methods.node.otf.c2sc(head,font,attr) return process(head,font,attr,'c2sc') end
+ function fonts.methods.node.otf.calt(head,font,attr) return process(head,font,attr,'calt') end
+ function fonts.methods.node.otf.case(head,font,attr) return process(head,font,attr,'case') end
+ function fonts.methods.node.otf.ccmp(head,font,attr) return process(head,font,attr,'ccmp') end
+ function fonts.methods.node.otf.clig(head,font,attr) return process(head,font,attr,'clig') end
+ function fonts.methods.node.otf.cpsp(head,font,attr) return process(head,font,attr,'cpsp') end
+ function fonts.methods.node.otf.cswh(head,font,attr) return process(head,font,attr,'cswh') end
+ function fonts.methods.node.otf.curs(head,font,attr) return process(head,font,attr,'curs') end
+ function fonts.methods.node.otf.dlig(head,font,attr) return process(head,font,attr,'dlig') end
+ function fonts.methods.node.otf.dnom(head,font,attr) return process(head,font,attr,'dnom') end
+ function fonts.methods.node.otf.expt(head,font,attr) return process(head,font,attr,'expt') end
+ function fonts.methods.node.otf.fin2(head,font,attr) return process(head,font,attr,'fin2') end
+ function fonts.methods.node.otf.fin3(head,font,attr) return process(head,font,attr,'fin3') end
+ function fonts.methods.node.otf.fina(head,font,attr) return process(head,font,attr,'fina',3) end
+ function fonts.methods.node.otf.frac(head,font,attr) return process(head,font,attr,'frac') end
+ function fonts.methods.node.otf.fwid(head,font,attr) return process(head,font,attr,'fwid') end
+ function fonts.methods.node.otf.haln(head,font,attr) return process(head,font,attr,'haln') end
+ function fonts.methods.node.otf.hist(head,font,attr) return process(head,font,attr,'hist') end
+ function fonts.methods.node.otf.hkna(head,font,attr) return process(head,font,attr,'hkna') end
+ function fonts.methods.node.otf.hlig(head,font,attr) return process(head,font,attr,'hlig') end
+ function fonts.methods.node.otf.hngl(head,font,attr) return process(head,font,attr,'hngl') end
+ function fonts.methods.node.otf.hwid(head,font,attr) return process(head,font,attr,'hwid') end
+ function fonts.methods.node.otf.init(head,font,attr) return process(head,font,attr,'init',1) end
+ function fonts.methods.node.otf.isol(head,font,attr) return process(head,font,attr,'isol',4) end
+ function fonts.methods.node.otf.ital(head,font,attr) return process(head,font,attr,'ital') end
+ function fonts.methods.node.otf.jp78(head,font,attr) return process(head,font,attr,'jp78') end
+ function fonts.methods.node.otf.jp83(head,font,attr) return process(head,font,attr,'jp83') end
+ function fonts.methods.node.otf.jp90(head,font,attr) return process(head,font,attr,'jp90') end
+ function fonts.methods.node.otf.kern(head,font,attr) return process(head,font,attr,'kern') end
+ function fonts.methods.node.otf.liga(head,font,attr) return process(head,font,attr,'liga') end
+ function fonts.methods.node.otf.lnum(head,font,attr) return process(head,font,attr,'lnum') end
+ function fonts.methods.node.otf.locl(head,font,attr) return process(head,font,attr,'locl') end
+ function fonts.methods.node.otf.mark(head,font,attr) return process(head,font,attr,'mark') end
+ function fonts.methods.node.otf.med2(head,font,attr) return process(head,font,attr,'med2') end
+ function fonts.methods.node.otf.medi(head,font,attr) return process(head,font,attr,'medi',2) end
+ function fonts.methods.node.otf.mgrk(head,font,attr) return process(head,font,attr,'mgrk') end
+ function fonts.methods.node.otf.mkmk(head,font,attr) return process(head,font,attr,'mkmk') end
+ function fonts.methods.node.otf.nalt(head,font,attr) return process(head,font,attr,'nalt') end
+ function fonts.methods.node.otf.nlck(head,font,attr) return process(head,font,attr,'nlck') end
+ function fonts.methods.node.otf.nukt(head,font,attr) return process(head,font,attr,'nukt') end
+ function fonts.methods.node.otf.numr(head,font,attr) return process(head,font,attr,'numr') end
+ function fonts.methods.node.otf.onum(head,font,attr) return process(head,font,attr,'onum') end
+ function fonts.methods.node.otf.ordn(head,font,attr) return process(head,font,attr,'ordn') end
+ function fonts.methods.node.otf.ornm(head,font,attr) return process(head,font,attr,'ornm') end
+ function fonts.methods.node.otf.pnum(head,font,attr) return process(head,font,attr,'pnum') end
+ function fonts.methods.node.otf.pref(head,font,attr) return process(head,font,attr,'pref') end
+ function fonts.methods.node.otf.pres(head,font,attr) return process(head,font,attr,'pres') end
+ function fonts.methods.node.otf.pstf(head,font,attr) return process(head,font,attr,'pstf') end
+ function fonts.methods.node.otf.rlig(head,font,attr) return process(head,font,attr,'rlig') end
+ function fonts.methods.node.otf.rphf(head,font,attr) return process(head,font,attr,'rphf') end
+ function fonts.methods.node.otf.salt(head,font,attr) return process(head,font,attr,'calt') end
+ function fonts.methods.node.otf.sinf(head,font,attr) return process(head,font,attr,'sinf') end
+ function fonts.methods.node.otf.smcp(head,font,attr) return process(head,font,attr,'smcp') end
+ function fonts.methods.node.otf.smpl(head,font,attr) return process(head,font,attr,'smpl') end
+ function fonts.methods.node.otf.ss01(head,font,attr) return process(head,font,attr,'ss01') end
+ function fonts.methods.node.otf.ss02(head,font,attr) return process(head,font,attr,'ss02') end
+ function fonts.methods.node.otf.ss03(head,font,attr) return process(head,font,attr,'ss03') end
+ function fonts.methods.node.otf.ss04(head,font,attr) return process(head,font,attr,'ss04') end
+ function fonts.methods.node.otf.ss05(head,font,attr) return process(head,font,attr,'ss05') end
+ function fonts.methods.node.otf.ss06(head,font,attr) return process(head,font,attr,'ss06') end
+ function fonts.methods.node.otf.ss07(head,font,attr) return process(head,font,attr,'ss07') end
+ function fonts.methods.node.otf.ss08(head,font,attr) return process(head,font,attr,'ss08') end
+ function fonts.methods.node.otf.ss09(head,font,attr) return process(head,font,attr,'ss09') end
+ function fonts.methods.node.otf.subs(head,font,attr) return process(head,font,attr,'subs') end
+ function fonts.methods.node.otf.sups(head,font,attr) return process(head,font,attr,'sups') end
+ function fonts.methods.node.otf.swsh(head,font,attr) return process(head,font,attr,'swsh') end
+ function fonts.methods.node.otf.titl(head,font,attr) return process(head,font,attr,'titl') end
+ function fonts.methods.node.otf.tnam(head,font,attr) return process(head,font,attr,'tnam') end
+ function fonts.methods.node.otf.tnum(head,font,attr) return process(head,font,attr,'tnum') end
+ function fonts.methods.node.otf.trad(head,font,attr) return process(head,font,attr,'trad') end
+ function fonts.methods.node.otf.unic(head,font,attr) return process(head,font,attr,'unic') end
+ function fonts.methods.node.otf.zero(head,font,attr) return process(head,font,attr,'zero') end
end
---~ function fonts.initializers.node.otf.install(feature,attribute)
---~ function fonts.initializers.node.otf[feature](tfm,value) return fonts.otf.features.prepare.feature(tfm,feature,value) end
---~ function fonts.methods.node.otf[feature] (head,font) return fonts.otf.features.process.feature(head,font,feature,attribute) end
---~ end
-
-- common stuff
-function fonts.otf.features.language(tfm,value)
+function fonts.otf.features.language(tfmdata,value)
if value then
value = value:lower()
if fonts.otf.tables.languages[value] then
- tfm.language = value
+ tfmdata.language = value
end
end
end
-function fonts.otf.features.script(tfm,value)
+function fonts.otf.features.script(tfmdata,value)
if value then
value = value:lower()
if fonts.otf.tables.scripts[value] then
- tfm.script = value
+ tfmdata.script = value
end
end
end
-function fonts.otf.features.mode(tfm,value)
+function fonts.otf.features.mode(tfmdata,value)
if value then
- tfm.mode = value:lower()
+ tfmdata.mode = value:lower()
end
end
@@ -3435,10 +3703,11 @@ fonts.initializers.node.otf.script = fonts.otf.features.script
fonts.initializers.node.otf.mode = fonts.otf.features.mode
fonts.initializers.node.otf.method = fonts.otf.features.mode
-fonts.initializers.node.otf.trep = fonts.initializers.base.otf.trep
-fonts.initializers.node.otf.tlig = fonts.initializers.base.otf.tlig
-fonts.initializers.node.otf.texquotes = fonts.initializers.base.otf.texquotes
-fonts.initializers.node.otf.texligatures = fonts.initializers.base.otf.texligatures
+--~ fonts.initializers.node.otf.trep = fonts.initializers.base.otf.trep
+--~ fonts.initializers.node.otf.tlig = fonts.initializers.base.otf.tlig
+
+--~ fonts.methods.node.otf.trep = function(head,font,attr) return process(head,font,attr,'trep') end
+--~ fonts.methods.node.otf.tlig = function(head,font,attr) return process(head,font,attr,'tlig') end
-- we need this because fonts can be bugged
@@ -3553,28 +3822,47 @@ do
-- font related value, but then we also need dynamic features which is
-- somewhat slower; and .. we need a chain of them
- function fonts.initializers.node.otf.analyze(tfm,value)
- local script, language = tfm.script, tfm.language
- local action = fonts.analyzers.initializers[script]
+ local type = type
+
+ local initializers, methods = fonts.analyzers.initializers, fonts.analyzers.methods
+
+ function fonts.initializers.node.otf.analyze(tfmdata,value,attr)
+ if attr and attr > 0 then
+ script, language = a_to_script[attr], a_to_language[attr]
+ else
+ script, language = tfmdata.script, tfmdata.language
+ end
+ local action = initializers[script]
if action then
if type(action) == "function" then
- return action(tfm,value)
- elseif action[language] then
- return action[language](tfm,value)
+ return action(tfmdata,value)
+ else
+ local action = action[language]
+ if action then
+ return action(tfmdata,value)
+ end
end
end
return nil
end
- function fonts.methods.node.otf.analyze(head,font)
+ function fonts.methods.node.otf.analyze(head,font,attr)
local tfmdata = fontdata[font]
- local script, language = fontdata[font].script, fontdata[font].language
- local action = fonts.analyzers.methods[script]
+ local script, language
+ if attr and attr > 0 then
+ script, language = a_to_script[attr], a_to_language[attr]
+ else
+ script, language = tfmdata.script, tfmdata.language
+ end
+ local action = methods[script]
if action then
if type(action) == "function" then
- return action(head,font)
- elseif action[language] then
- return action[language](head,font)
+ return action(head,font,attr)
+ else
+ action = action[language]
+ if action then
+ return action(head,font,attr)
+ end
end
end
return head, false
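
The analyze initializer and method above both pick an action first by script and then by language, with an attribute able to override the font defaults. A minimal standalone sketch of that dispatch; a_to_script and a_to_language are placeholders for the attribute maps referenced in the hunk and are left empty here:

-- sketch of the attribute-driven script/language dispatch; not the real code
local a_to_script, a_to_language = { }, { }

local function resolve(tfmdata,attr)
    if attr and attr > 0 then
        return a_to_script[attr], a_to_language[attr]
    else
        return tfmdata.script, tfmdata.language
    end
end

local function dispatch(actions,script,language,...)
    local action = actions[script]
    if type(action) == "function" then
        return action(...)
    elseif type(action) == "table" then
        action = action[language]
        if action then
            return action(...)
        end
    end
end
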
@@ -3623,8 +3911,8 @@ do
local fcs = fonts.color.set
local fcr = fonts.color.reset
- function fonts.analyzers.methods.nocolor(head,font)
- for n in nodes.traverse(glyph) do
+ function fonts.analyzers.methods.nocolor(head,font,attr)
+ for n in node.traverse(head,glyph) do
if not font or n.font == font then
fcr(n)
end
@@ -3632,7 +3920,7 @@ do
return head, true
end
- function fonts.analyzers.methods.arab(head,font) -- maybe make a special version with no trace
+ function fonts.analyzers.methods.arab(head,font,attr) -- maybe make a special version with no trace
local characters = fontdata[font].characters
local first, last, current, done = nil, nil, head, false
local trace = fonts.color.trace
@@ -3675,10 +3963,13 @@ do
--~ laststate = 0
end
while current do
- if current.id == glyph and current.font == font then
+ if current.id == glyph and current.subtype<256 and current.font == font then
done = true
local char = current.char
- if characters[char].class == "mark" then -- marks are now in components
+ local chardata = characters[char]
+ if not chardata then
+ -- troubles
+ elseif chardata.class == "mark" then -- marks are now in components
set_attribute(current,state,5) -- mark
if trace then fcs(current,"font:mark") end
elseif isol[char] then
@@ -3810,8 +4101,8 @@ do
-- will move to node-ini :
- local allowbreak = node.new("penalty") allowbreak.penalty = -100
- local nobreak = node.new("penalty") nobreak.penalty = 10000
+ local allowbreak = nodes.penalty( -100) nodes.register(allowbreak)
+ local nobreak = nodes.penalty( 10000) nodes.register(nobreak)
fonts.analyzers.methods.stretch_hang = true
@@ -3819,7 +4110,26 @@ do
-- it will become either a mkiv feature or an attribute, so this is
-- experimental
- function fonts.analyzers.methods.hang(head,font) -- maybe make a special version with no trace
+--~ function nodes.replace(head,current,newnode)
+--~ local oldnode = current
+--~ newnode.prev, newnode.next = oldnode.prev, oldnode.next
+--~ if oldnode.prev then
+--~ oldnode.prev.next = newnode
+--~ end
+--~ if oldnode.next then
+--~ oldnode.next.prev = newnode
+--~ end
+--~ if head == current then
+--~ head = newnode
+--~ end
+--~ node.free(oldnode)
+--~ return head, newnode
+--~ end
+--~ if char == 0x3000 then
+--~ head, current = nodes.replace(head,current,nodes.glue(fontdata[font].parameters[6],0,0))
+--~ end
+
+ function fonts.analyzers.methods.hang(head,font,attr) -- maybe make a special version with no trace
local characters = fontdata[font].characters
local current, last, done, stretch, prevchinese = head, nil, false, 0, false
local trace = fonts.color.trace
@@ -3827,7 +4137,7 @@ do
stretch = fontdata[font].parameters[6]
end
while current do
- if current.id == glyph then
+ if current.id == glyph and current.subtype<256 then
if current.font == font then
if prevchinese then
local temp = current.prev
@@ -3868,7 +4178,7 @@ do
elseif hyphenation[char] then
set_attribute(current,state,3) -- xxxx
local prev, next = current.prev, current.next
- if next and next.id == glyph and hyphenation[next.char] then
+ if next and next.id == glyph and next.subtype<256 and hyphenation[next.char] then
if trace then fcs(current,"font:medi") fcs(next,"font:medi")end -- we need nice names
if prev then
if prevchinese then
@@ -3894,24 +4204,31 @@ do
return head, done
end
-
-
fonts.analyzers.methods.hani = fonts.analyzers.methods.hang
end
-- experimental and will probably change
-function fonts.install_feature(type,...)
- if fonts[type] and fonts[type].install_feature then
- fonts[type].install_feature(...)
+do
+ local process = fonts.otf.features.process.feature
+ local prepare = fonts.otf.features.prepare.feature
+ function fonts.install_feature(type,...)
+ if fonts[type] and fonts[type].install_feature then
+ fonts[type].install_feature(...)
+ end
+ end
+ function fonts.otf.install_feature(tag)
+ fonts.methods.node.otf [tag] = function(head,font,attr) return process(head,font,attr,tag) end
+ fonts.initializers.node.otf[tag] = function(tfm,value) return prepare(tfm,tag,value) end
end
-end
-function fonts.otf.install_feature(tag)
- fonts.methods.node.otf [tag] = function(head,font) return fonts.otf.features.process.feature(head,font,tag) end
- fonts.initializers.node.otf[tag] = function(tfm,value) return fonts.otf.features.prepare.feature(tfm,tag,value) end
end
+-- todo: always load texhistoric
+
+fonts.install_feature("otf","tlig")
+fonts.install_feature("otf","trep")
+
--~ exclam + quoteleft => exclamdown
--~ question + quoteleft => questiondown
@@ -3934,3 +4251,9 @@ end
--~ hyphen + hyphen => endash
--~ endash + hyphen => emdash
+-- this is a hack, currently featurefiles erase existing features
+
+fonts.initializers.node.otf.tlig = fonts.initializers.base.otf.tlig
+fonts.initializers.node.otf.trep = fonts.initializers.base.otf.trep
+fonts.methods.node.otf ['tlig'] = nil
+fonts.methods.node.otf ['trep'] = nil
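
The install_feature indirection above wires up one node-mode handler and one initializer per feature tag. A minimal standalone sketch of that registration pattern; the handlers and preparers tables are placeholders, not the real fonts.methods / fonts.initializers namespaces:

-- per-tag registration sketch; process and prepare are stubs
local handlers, preparers = { }, { }

local function process(head,font,attr,tag) return head, false end -- stub
local function prepare(tfm,tag,value)       return nil         end -- stub

local function install_feature(tag)
    handlers [tag] = function(head,font,attr) return process(head,font,attr,tag) end
    preparers[tag] = function(tfm,value)      return prepare(tfm,tag,value)      end
end

install_feature("tlig")
install_feature("trep")
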
diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua
index 16910a8fd..ddc2924fc 100644
--- a/tex/context/base/font-syn.lua
+++ b/tex/context/base/font-syn.lua
@@ -127,7 +127,7 @@ function fonts.names.identify()
end
end
local function identify(completename,name,suffix)
- if not done[name] then
+ if not done[name] and io.exists(completename) then
nofread = nofread + 1
logs.info("fontnames", "identifying " .. suffix .. " font " .. completename)
logs.push()
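
The added io.exists test above skips names whose files are no longer on disk before any time is spent on identification. In plain Lua the check amounts to something like this sketch (not the actual ConTeXt helper):

-- plain-Lua sketch of an existence test
local function file_exists(name)
    local f = io.open(name,"rb")
    if f then
        f:close()
        return true
    end
    return false
end
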
diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua
index 67a7866c5..abe3eaf36 100644
--- a/tex/context/base/font-tfm.lua
+++ b/tex/context/base/font-tfm.lua
@@ -86,6 +86,7 @@ function fonts.tfm.read_from_tfm(specification)
tfmdata.fonts = vfdata.fonts
end
end
+--~ print(table.serialize(tfmdata))
end
fonts.tfm.enhance(tfmdata,specification)
end
@@ -152,67 +153,76 @@ end
-- if t.tounicode = 1 then also characters[n].tounicode = "string"
-function fonts.tfm.scale(tfmtable, scaledpoints)
+function fonts.tfm.do_scale(tfmtable, scaledpoints)
+ -- beware, the boundingbox is passed by reference, so we may not overwrite it
+ -- in the process; numbers are of course copies
+ --
-- 65536 = 1pt
-- 1000 units per designsize (not always)
local scale, round = tex.scale, tex.round -- replaces math.floor(n*m+0.5)
- local delta
if scaledpoints < 0 then
scaledpoints = (- scaledpoints/1000) * tfmtable.designsize -- already in sp
end
- delta = scaledpoints/(tfmtable.units or 1000) -- brr, some open type fonts have 2048
+ local delta = scaledpoints/(tfmtable.units or 1000) -- brr, some open type fonts have 2048
local t = { }
t.factor = delta
for k,v in pairs(tfmtable) do
- if type(v) == "table" then
- t[k] = { }
- else
- t[k] = v
- end
+ t[k] = (type(v) == "table" and { }) or v
end
local tc = t.characters
+ local trace = fonts.trace
for k,v in pairs(tfmtable.characters) do
+ local w, h, d = v.width, v.height, v.depth
local chr = {
unicode = v.unicode,
name = v.name,
index = v.index or k,
- width = scale(v.width , delta),
- height = scale(v.height, delta),
- depth = scale(v.depth , delta),
+ width = (w == 0 and 0) or scale(w, delta),
+ height = (h == 0 and 0) or scale(h, delta),
+ depth = (d == 0 and 0) or scale(d, delta),
class = v.class
}
-if fonts.trace then
- logs.report("define font", string.format("n=%s, u=%s, i=%s, n=%s c=%s",k,v.unicode,v.index,v.name or '-',v.class or '-'))
-end
- local b = v.boundingbox -- maybe faster to have llx etc not in table
- if b then
- chr.boundingbox = scale(v.boundingbox,delta)
+ if trace then
+ logs.report("define font", string.format("n=%s, u=%s, i=%s, n=%s c=%s",k,v.unicode,v.index,v.name or '-',v.class or '-'))
end
- if v.italic then
- chr.italic = scale(v.italic,delta)
+ local vb = v.boundingbox
+ if vb then
+ chr.boundingbox = scale(vb,delta)
end
- if v.kerns then
- chr.kerns = scale(v.kerns,delta)
+ local vi = v.italic
+ if vi then
+ chr.italic = scale(vi,delta)
end
- if v.ligatures then
- local tt = { }
- for kk,vv in pairs(v.ligatures) do
- tt[kk] = vv
+ local vk = v.kerns
+ if vk then
+ chr.kerns = scale(vk,delta)
+ end
+ local vl = v.ligatures
+ if vl then
+ if true then
+ chr.ligatures = v.ligatures -- shared
+ else
+ local tt = { }
+ for i,l in pairs(vl) do
+ tt[i] = l
+ end
+ chr.ligatures = tt
end
- chr.ligatures = tt
end
- if v.commands then
+ local vc = v.commands
+ if vc then
-- we assume non scaled commands here
- local vc, tt = v.commands, { }
+ local tt = { }
for i=1,#vc do
local ivc = vc[i]
local key = ivc[1]
- if key == "right" or key == "left" then
+ if key == "right" or key == "left" or key == "down" or key == "up" then
tt[#tt+1] = { key, scale(ivc[2],delta) }
else
tt[#tt+1] = ivc -- shared since in cache and untouched
end
end
+--~ print(table.serialize(vc),table.serialize(tt))
chr.commands = tt
end
tc[k] = chr
@@ -226,19 +236,29 @@ end
end
end
-- t.encodingbytes, t.filename, t.fullname, t.name: elsewhere
- t.size = scaledpoints
- t.italicangle = tfmtable.italicangle
- t.ascender = scale(tfmtable.ascender or 0,delta)
- t.descender = scale(tfmtable.descender or 0,delta)
- t.shared = tfmtable.shared or { }
- if t.unique then
- t.unique = table.fastcopy(tfmtable.unique)
- else
- t.unique = { }
- end
+ t.size = scaledpoints
if t.fonts then
t.fonts = table.fastcopy(t.fonts) -- maybe we virtualize more afterwards
end
+ return t, delta
+end
+
+--[[ldx--
+<p>The reason why the scaler is split is that for a while we experimented
+with a helper function. However, in practice the <l n='api'/> calls are too slow to
+make this profitable and the <l n='lua'/> based variant was just faster. A day
+wasted but an experience richer.</p>
+--ldx]]--
+
+function fonts.tfm.scale(tfmtable, scaledpoints)
+ local scale = tex.scale
+ local t, factor = fonts.tfm.do_scale(tfmtable, scaledpoints)
+ t.factor = factor
+ t.ascender = scale(tfmtable.ascender or 0, factor)
+ t.descender = scale(tfmtable.descender or 0, factor)
+ t.shared = tfmtable.shared or { }
+ t.unique = table.fastcopy(tfmtable.unique or {})
+--~ print("scaling", t.name, t.factor) -- , fonts.tfm.hash_features(tfmtable.specification))
return t
end
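
The ldx note above explains the split into do_scale plus a thin scale wrapper. A much reduced sketch of that shape, with plain multiplication standing in for tex.scale and its rounding, and illustrative field names only:

-- reduced sketch of the split scaler; not the real fonts.tfm code
local function do_scale(fnt, scaledpoints)
    local delta = scaledpoints/(fnt.units or 1000)
    local t = { factor = delta, characters = { } }
    for u, c in pairs(fnt.characters) do
        t.characters[u] = {
            width  = (c.width  == 0 and 0) or c.width  * delta,
            height = (c.height == 0 and 0) or c.height * delta,
            depth  = (c.depth  == 0 and 0) or c.depth  * delta,
        }
    end
    return t, delta
end

local function scale(fnt, scaledpoints)
    local t, factor = do_scale(fnt, scaledpoints)
    t.size      = scaledpoints
    t.ascender  = (fnt.ascender  or 0) * factor
    t.descender = (fnt.descender or 0) * factor
    return t
end
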
@@ -372,7 +392,9 @@ function fonts.initializers.common.complement(tfmdata,value) -- todo: value = la
if value then
local chr, index, data, get_virtual_id = tfmdata.characters, nil, characters.data, fonts.tfm.get_virtual_id
local selection = fonts.initializers.complements.load("LATIN") -- will be value
- for _, k in ipairs(selection) do
+ -- for _, k in ipairs(selection) do
+ for i=1,#selection do
+ local k = selection[i]
if not chr[k] then
local dk = data[k]
local vs, name = dk.specials, dk.adobename
@@ -520,8 +542,8 @@ do
local glyph = node.id('glyph')
local fontdata = fonts.tfm.id
local set_attribute = node.set_attribute
- local unset_attribute = node.unset_attribute
- local has_attribute = node.has_attribute
+-- local unset_attribute = node.unset_attribute
+-- local has_attribute = node.has_attribute
local state = attributes.numbers['state'] or 100
@@ -533,14 +555,6 @@ do
function fonts.analyzers.aux.setstate(head,font)
local characters = fontdata[font].characters
local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
- local function finish()
- if first and first == last then
- set_attribute(last,state,4) -- isol
- elseif last then
- set_attribute(last,state,3) -- fina
- end
- first, last, n = nil, nil, 0
- end
while current do
if current.id == glyph and current.font == font then
if characters[current.char].class == "mark" then
@@ -553,12 +567,21 @@ do
last, n = current, n+1
set_attribute(current,state,2) -- medi
end
- else
- finish()
+ else -- finish
+ if first and first == last then
+ set_attribute(last,state,4) -- isol
+ elseif last then
+ set_attribute(last,state,3) -- fina
+ end
+ first, last, n = nil, nil, 0
end
current = current.next
end
- finish()
+ if first and first == last then
+ set_attribute(last,state,4) -- isol
+ elseif last then
+ set_attribute(last,state,3) -- fina
+ end
return head, done
end
@@ -585,7 +608,7 @@ do
-- check if head
if last and not last.components then
last.components = current
- last.components.prev = nil
+ current.prev = nil -- last.components.prev = nil
done = true
n = 1
else
diff --git a/tex/context/base/font-vf.lua b/tex/context/base/font-vf.lua
index f0258e281..7070acca1 100644
--- a/tex/context/base/font-vf.lua
+++ b/tex/context/base/font-vf.lua
@@ -67,14 +67,37 @@ function fonts.vf.aux.combine.load(g,name)
return fonts.tfm.read_and_define(name or g.specification.name,g.specification.size)
end
+function fonts.vf.aux.combine.names(g,name,force)
+ local f, id = fonts.tfm.read_and_define(name,g.specification.size)
+ if f and id then
+ local fc, gc = f.characters, g.characters
+ g.fonts[#g.fonts+1] = { id = id } -- no need to be sparse
+ local hn = #g.fonts
+ for k, v in pairs(fc) do
+ if force or not gc[k] then
+ gc[k] = table.fastcopy(v)
+ gc[k].commands = { { 'slot', hn, k } }
+ end
+ end
+ if not g.parameters and #g.fonts > 0 then -- share this code !
+ g.parameters = table.fastcopy(f.parameters)
+ g.italicangle = f.italicangle
+ g.ascender = f.ascender
+ g.descender = f.descender
+ end
+ end
+end
+
fonts.vf.aux.combine.commands = {
- ["initialize"] = function(g,v) fonts.vf.aux.combine.assign(g, g.name) end,
- ["include-method"] = function(g,v) fonts.vf.aux.combine.process(g,fonts.vf.combinations[v[2]]) end, -- name
+ ["initialize"] = function(g,v) fonts.vf.aux.combine.assign (g,g.name) end,
+ ["include-method"] = function(g,v) fonts.vf.aux.combine.process (g,fonts.vf.combinations[v[2]]) end, -- name
["copy-parameters"] = function(g,v) fonts.vf.aux.combine.parameters(g,v[2]) end, -- name
- ["copy-range"] = function(g,v) fonts.vf.aux.combine.assign(g,v[2],v[3],v[4],v[5],true) end, -- name, from-start, from-end, to-start
- ["copy-char"] = function(g,v) fonts.vf.aux.combine.assign(g,v[2],v[3],v[3],v[4],true) end, -- name, from, to
- ["fallback-range"] = function(g,v) fonts.vf.aux.combine.assign(g,v[2],v[3],v[4],v[5],false) end, -- name, from-start, from-end, to-start
- ["fallback-char"] = function(g,v) fonts.vf.aux.combine.assign(g,v[2],v[3],v[3],v[4],false) end, -- name, from, to
+ ["copy-range"] = function(g,v) fonts.vf.aux.combine.assign (g,v[2],v[3],v[4],v[5],true) end, -- name, from-start, from-end, to-start
+ ["copy-char"] = function(g,v) fonts.vf.aux.combine.assign (g,v[2],v[3],v[3],v[4],true) end, -- name, from, to
+ ["fallback-range"] = function(g,v) fonts.vf.aux.combine.assign (g,v[2],v[3],v[4],v[5],false) end, -- name, from-start, from-end, to-start
+ ["fallback-char"] = function(g,v) fonts.vf.aux.combine.assign (g,v[2],v[3],v[3],v[4],false) end, -- name, from, to
+ ["copy_names"] = function(g,v) fonts.vf.aux.combine.names (g,v[2],true) end,
+ ["fallback_names"] = function(g,v) fonts.vf.aux.combine.names (g,v[2],false) end,
}
function fonts.vf.combine(specification,tag)
@@ -120,6 +143,15 @@ fonts.define.methods.install(
--~ }
--~ )
+--~ fonts.define.methods.install(
+--~ "lmsymbol10", {
+--~ { "fallback_names", "lmsy10.afm" } ,
+--~ { "fallback_names", "msam10.afm" } ,
+--~ { "fallback_names", "msbm10.afm" }
+--~ }
+--~ )
+--~ \font\TestFont=dummy@lmsymbol10 at 24pt
+
-- docu case
--~ fonts.define.methods.install(
diff --git a/tex/context/base/l-aux.lua b/tex/context/base/l-aux.lua
index f5aa7e67e..2c98a2f35 100644
--- a/tex/context/base/l-aux.lua
+++ b/tex/context/base/l-aux.lua
@@ -14,19 +14,19 @@ do
hash[key] = value
end
- local space = lpeg.S(' ')^0
- local equal = lpeg.S("=")^1
- local comma = lpeg.S(",")^0
- local nonspace = lpeg.P(1-lpeg.S(' '))^1
- local nonequal = lpeg.P(1-lpeg.S('='))^1
- local noncomma = lpeg.P(1-lpeg.S(','))^1
- local nonbrace = lpeg.P(1-lpeg.S('{}'))^1
- local nested = lpeg.S('{') * lpeg.C(nonbrace^1) * lpeg.S('}')
+ local space = lpeg.P(' ')
+ local equal = lpeg.P("=")
+ local comma = lpeg.P(",")
+ local lbrace = lpeg.P("{")
+ local rbrace = lpeg.P("}")
+ local nobrace = 1 - (lbrace+rbrace)
+ local nested = lpeg.P{ lbrace * (nobrace + lpeg.V(1))^0 * rbrace }
- local key = lpeg.C(nonequal)
- local value = nested + lpeg.C(noncomma)
+ local key = lpeg.C((1-equal)^1)
+ local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0)
+ local pattern = ((space^0 * key * equal * value * comma^0) / set)^1
- local pattern = ((space * key * equal * value * comma) / set)^1
+ -- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored
function aux.settings_to_hash(str)
hash = { }
@@ -34,7 +34,11 @@ do
return hash
end
- local pattern = lpeg.Ct((space * value * comma)^1)
+ local seperator = comma * space^0
+ local value = lbrace * lpeg.C(nobrace^0) * rbrace + lpeg.C((1-seperator)^0)
+ local pattern = lpeg.Ct(value*(seperator*value)^0)
+
+ -- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
function aux.settings_to_array(str)
return lpeg.match(pattern,str)
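
Inferred usage of the two parsers, with expected values taken from the comment strings in this hunk rather than from verified output:

-- expected values follow the comments above; not verified output
local h = aux.settings_to_hash("a=1, b=2, d={a{b,c}d}")
-- h.a == "1", h.b == "2", h.d == "a{b,c}d"  (outer braces stripped)
local t = aux.settings_to_array("aap, {noot}, mies")
-- t[1] == "aap", t[2] == "noot", t[3] == "mies"
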
@@ -42,30 +46,27 @@ do
end
---~ do
---~ str = "a=1, b=2, c=3, d={abc}"
-
---~ for k,v in pairs(aux.settings_to_hash (str)) do print(k,v) end
---~ for k,v in pairs(aux.settings_to_array(str)) do print(k,v) end
---~ end
-
-function aux.hash_to_string(h,separator,yes,no,strict)
+function aux.hash_to_string(h,separator,yes,no,strict,omit)
if h then
- local t = { }
- for _,k in ipairs(table.sortedkeys(h)) do
- local v = h[k]
- if type(v) == "boolean" then
- if yes and no then
- if v then
- t[#t+1] = k .. '=' .. yes
- elseif not strict then
- t[#t+1] = k .. '=' .. no
+ local t, s = { }, table.sortedkeys(h)
+ omit = omit and table.tohash(omit)
+ for i=1,#s do
+ local key = s[i]
+ if not omit or not omit[key] then
+ local value = h[key]
+ if type(value) == "boolean" then
+ if yes and no then
+ if value then
+ t[#t+1] = key .. '=' .. yes
+ elseif not strict then
+ t[#t+1] = key .. '=' .. no
+ end
+ elseif value or not strict then
+ t[#t+1] = key .. '=' .. tostring(value)
end
- elseif v or not strict then
- t[#t+1] = k .. '=' .. tostring(v)
+ else
+ t[#t+1] = key .. '=' .. value
end
- else
- t[#t+1] = k .. '=' .. v
end
end
return table.concat(t,separator or ",")
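
Inferred usage of the extended hash_to_string, with the new omit argument filtering keys out; the result follows from the code above and the sortedkeys order:

-- inferred from the code above; keys come out in sorted order
local s = aux.hash_to_string({ a = 1, b = true, c = "x" }, ",", "yes", "no", false, { "c" })
-- s == "a=1,b=yes"   (c is omitted, booleans map to yes/no)
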
diff --git a/tex/context/base/l-boolean.lua b/tex/context/base/l-boolean.lua
index 098f0b3a1..66c608cee 100644
--- a/tex/context/base/l-boolean.lua
+++ b/tex/context/base/l-boolean.lua
@@ -13,11 +13,12 @@ end
function toboolean(str,tolerant)
if tolerant then
- if type(str) == "string" then
+ local tstr = type(str)
+ if tstr == "string" then
return str == "true" or str == "yes" or str == "on" or str == "1"
- elseif type(str) == "number" then
+ elseif tstr == "number" then
return tonumber(str) ~= 0
- elseif type(str) == "nil" then
+ elseif tstr == "nil" then
return false
else
return str
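
A few illustrative calls to the tolerant branch shown above:

-- behaviour follows the tolerant branch above
print(toboolean("yes", true)) -- true
print(toboolean("0",   true)) -- false, only "true"/"yes"/"on"/"1" count as true strings
print(toboolean(2,     true)) -- true, any nonzero number
print(toboolean(nil,   true)) -- false
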
diff --git a/tex/context/base/l-dir.lua b/tex/context/base/l-dir.lua
index df241d221..dfacfb291 100644
--- a/tex/context/base/l-dir.lua
+++ b/tex/context/base/l-dir.lua
@@ -13,15 +13,18 @@ dir = { }
if lfs then
function dir.glob_pattern(path,patt,recurse,action)
- for name in lfs.dir(path) do
- local full = path .. '/' .. name
- local mode = lfs.attributes(full,'mode')
- if mode == 'file' then
- if name:find(patt) then
- action(full)
+ local ok, scanner = xpcall(function() return lfs.dir(path) end, function() end) -- kepler safe
+ if ok and type(scanner) == "function" then
+ for name in scanner do
+ local full = path .. '/' .. name
+ local mode = lfs.attributes(full,'mode')
+ if mode == 'file' then
+ if name:find(patt) then
+ action(full)
+ end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ dir.glob_pattern(full,patt,recurse,action)
end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- dir.glob_pattern(full,patt,recurse,action)
end
end
end
@@ -46,6 +49,30 @@ if lfs then
return t
end
+ function dir.globfiles(path,recurse,func,files)
+ if type(func) == "string" then
+ local s = func -- alas, we need this indirect way
+ func = function(name) return name:find(s) end
+ end
+ files = files or { }
+ for name in lfs.dir(path) do
+ if name:find("^%.") then
+ --- skip
+ elseif lfs.attributes(name,'mode') == "directory" then
+ if recurse then
+ dir.globfiles(path .. "/" .. name,recurse,func,files)
+ end
+ elseif func then
+ if func(name) then
+ files[#files+1] = path .. "/" .. name
+ end
+ else
+ files[#files+1] = path .. "/" .. name
+ end
+ end
+ return files
+ end
+
-- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex")
-- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex")
-- t = dir.glob("c:/data/develop/context/texmf/**/*.tex")
@@ -62,33 +89,23 @@ if lfs then
--~ mkdirs(".","/a/b/c")
--~ mkdirs("a","b","c")
- function dir.mkdirs(...) -- root,... or ... ; root is not split
- local pth, err = "", false
- for k,v in pairs({...}) do
- if k == 1 then
- if not lfs.isdir(v) then
- -- print("no root path " .. v)
- err = true
- else
- pth = v
- end
- elseif lfs.isdir(pth .. "/" .. v) then
- pth = pth .. "/" .. v
+ function dir.mkdirs(...)
+ local pth, err, lst = "", false, table.concat({...},"/")
+ for _, s in ipairs(lst:split("/")) do
+ if pth == "" then
+ pth = (s == "" and "/") or s
else
- for _,s in pairs(v:split("/")) do
- pth = pth .. "/" .. s
- if not lfs.isdir(pth) then
- ok = lfs.mkdir(pth)
- if not lfs.isdir(pth) then
- err = true
- end
- end
- if err then break end
- end
+ pth = pth .. "/" .. s
+ end
+ if s == "" then
+ -- can be network path
+ elseif not lfs.isdir(pth) then
+ lfs.mkdir(pth)
end
- if err then break end
end
return pth, not err
end
+ dir.makedirs = dir.mkdirs
+
end
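
Illustrative calls to the rewritten directory helpers; the paths and the pattern are made up:

-- made-up paths; mkdirs now joins its arguments and creates each step in turn
dir.mkdirs("build","cache","fonts")                  -- ends up as build/cache/fonts
local luafiles = dir.globfiles(".", true, "%.lua$")  -- recurse, keep names matching the pattern
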
diff --git a/tex/context/base/l-io.lua b/tex/context/base/l-io.lua
index e53b8fb6a..b7783f892 100644
--- a/tex/context/base/l-io.lua
+++ b/tex/context/base/l-io.lua
@@ -73,32 +73,53 @@ end
do
+ local sb = string.byte
+
+--~ local nextchar = {
+--~ [ 4] = function(f)
+--~ return f:read(1), f:read(1), f:read(1), f:read(1)
+--~ end,
+--~ [ 2] = function(f)
+--~ return f:read(1), f:read(1)
+--~ end,
+--~ [ 1] = function(f)
+--~ return f:read(1)
+--~ end,
+--~ [-2] = function(f)
+--~ local a = f:read(1)
+--~ local b = f:read(1)
+--~ return b, a
+--~ end,
+--~ [-4] = function(f)
+--~ local a = f:read(1)
+--~ local b = f:read(1)
+--~ local c = f:read(1)
+--~ local d = f:read(1)
+--~ return d, c, b, a
+--~ end
+--~ }
+
local nextchar = {
[ 4] = function(f)
- return f:read(1), f:read(1), f:read(1), f:read(1)
+ return f:read(1,1,1,1)
end,
[ 2] = function(f)
- return f:read(1), f:read(1)
+ return f:read(1,1)
end,
[ 1] = function(f)
return f:read(1)
end,
[-2] = function(f)
- local a = f:read(1)
- local b = f:read(1)
+ local a, b = f:read(1,1)
return b, a
end,
[-4] = function(f)
- local a = f:read(1)
- local b = f:read(1)
- local c = f:read(1)
- local c = f:read(1)
+ local a, b, c, d = f:read(1,1,1,1)
return d, c, b, a
end
}
function io.characters(f,n)
- local sb = string.byte
if f then
return nextchar[n or 1], f
else
@@ -110,12 +131,62 @@ end
do
+ local sb = string.byte
+
+--~ local nextbyte = {
+--~ [4] = function(f)
+--~ local a = f:read(1)
+--~ local b = f:read(1)
+--~ local c = f:read(1)
+--~ local d = f:read(1)
+--~ if d then
+--~ return sb(a), sb(b), sb(c), sb(d)
+--~ else
+--~ return nil, nil, nil, nil
+--~ end
+--~ end,
+--~ [2] = function(f)
+--~ local a = f:read(1)
+--~ local b = f:read(1)
+--~ if b then
+--~ return sb(a), sb(b)
+--~ else
+--~ return nil, nil
+--~ end
+--~ end,
+--~ [1] = function (f)
+--~ local a = f:read(1)
+--~ if a then
+--~ return sb(a)
+--~ else
+--~ return nil
+--~ end
+--~ end,
+--~ [-2] = function (f)
+--~ local a = f:read(1)
+--~ local b = f:read(1)
+--~ if b then
+--~ return sb(b), sb(a)
+--~ else
+--~ return nil, nil
+--~ end
+--~ end,
+--~ [-4] = function(f)
+--~ local a = f:read(1)
+--~ local b = f:read(1)
+--~ local c = f:read(1)
+--~ local d = f:read(1)
+--~ if d then
+--~ return sb(d), sb(c), sb(b), sb(a)
+--~ else
+--~ return nil, nil, nil, nil
+--~ end
+--~ end
+--~ }
+
local nextbyte = {
[4] = function(f)
- local a = f:read(1)
- local b = f:read(1)
- local c = f:read(1)
- local d = f:read(1)
+ local a, b, c, d = f:read(1,1,1,1)
if d then
return sb(a), sb(b), sb(c), sb(d)
else
@@ -123,8 +194,7 @@ do
end
end,
[2] = function(f)
- local a = f:read(1)
- local b = f:read(1)
+ local a, b = f:read(1,1)
if b then
return sb(a), sb(b)
else
@@ -140,8 +210,7 @@ do
end
end,
[-2] = function (f)
- local a = f:read(1)
- local b = f:read(1)
+ local a, b = f:read(1,1)
if b then
return sb(b), sb(a)
else
@@ -149,10 +218,7 @@ do
end
end,
[-4] = function(f)
- local a = f:read(1)
- local b = f:read(1)
- local c = f:read(1)
- local d = f:read(1)
+ local a, b, c, d = f:read(1,1,1,1)
if d then
return sb(d), sb(c), sb(b), sb(a)
else
@@ -162,7 +228,6 @@ do
}
function io.bytes(f,n)
- local sb = string.byte
if f then
return nextbyte[n or 1], f
else
@@ -171,3 +236,35 @@ do
end
end
+
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(string.format(" [%s]",table.concat(options,"|")))
+ end
+ if default then
+ io.write(string.format(" [%s]",default))
+ end
+ io.write(string.format(" "))
+ local answer = io.read()
+ answer = answer:gsub("^%s*(.*)%s*$","%1")
+ if answer == "" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for _,v in pairs(options) do
+ if v == answer then
+ return answer
+ end
+ end
+ local pattern = "^" .. answer
+ for _,v in pairs(options) do
+ if v:find(pattern) then
+ return v
+ end
+ end
+ end
+ end
+end
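
Hedged usage of the byte iterator and the new prompt helper; the filename is made up and the io.ask call is left commented out because it blocks on input:

-- "somefile.bin" is a made-up name; io.bytes(f,2) yields two byte values per step
local f = io.open("somefile.bin","rb")
if f then
    for a, b in io.bytes(f,2) do
        -- a, b are numeric byte values; io.bytes(f,-2) would give the swapped order
    end
    f:close()
end
-- local answer = io.ask("continue?", "yes", { "yes", "no" })  -- "y" prefix-matches "yes"
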
diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua
new file mode 100644
index 000000000..9e589621b
--- /dev/null
+++ b/tex/context/base/l-lpeg.lua
@@ -0,0 +1,41 @@
+-- filename : l-lpeg.lua
+-- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
+-- copyright: PRAGMA ADE / ConTeXt Development Team
+-- license : see context related readme files
+
+if not versions then versions = { } end versions['l-lpeg'] = 1.001
+
+--~ l-lpeg.lua :
+
+--~ lpeg.digit = lpeg.R('09')^1
+--~ lpeg.sign = lpeg.S('+-')^1
+--~ lpeg.cardinal = lpeg.P(lpeg.sign^0 * lpeg.digit^1)
+--~ lpeg.integer = lpeg.P(lpeg.sign^0 * lpeg.digit^1)
+--~ lpeg.float = lpeg.P(lpeg.sign^0 * lpeg.digit^0 * lpeg.P('.') * lpeg.digit^1)
+--~ lpeg.number = lpeg.float + lpeg.integer
+--~ lpeg.oct = lpeg.P("0") * lpeg.R('07')^1
+--~ lpeg.hex = lpeg.P("0x") * (lpeg.R('09') + lpeg.R('AF'))^1
+--~ lpeg.uppercase = lpeg.P("AZ")
+--~ lpeg.lowercase = lpeg.P("az")
+
+--~ lpeg.eol = lpeg.S('\r\n\f')^1 -- includes formfeed
+--~ lpeg.space = lpeg.S(' ')^1
+--~ lpeg.nonspace = lpeg.P(1-lpeg.space)^1
+--~ lpeg.whitespace = lpeg.S(' \r\n\f\t')^1
+--~ lpeg.nonwhitespace = lpeg.P(1-lpeg.whitespace)^1
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return lpeg.P { lpeg.P(pattern) + 1 * lpeg.V(1) }
+end
+
+function lpeg.startswith(pattern) --slightly adapted
+ return lpeg.P(pattern)
+end
+
+--~ g = lpeg.splitter(" ",function(s) ... end) -- gmatch:lpeg = 3:2
+
+function lpeg.splitter(pattern, action)
+ return (((1-lpeg.P(pattern))^1)/action+1)^0
+end
+
+
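
Hedged usage of the new lpeg helpers; this assumes the lpeg library that LuaTeX ships with:

-- splitter feeds each space-separated chunk to the action
local words = { }
lpeg.match(lpeg.splitter(" ", function(s) words[#words+1] = s end), "one two three")
-- words == { "one", "two", "three" }

-- anywhere succeeds if the pattern occurs anywhere in the subject
print(lpeg.match(lpeg.anywhere("tex"), "context")) -- position after the first match, or nil
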
diff --git a/tex/context/base/l-md5.lua b/tex/context/base/l-md5.lua
index 2a24f4169..4deb9bd74 100644
--- a/tex/context/base/l-md5.lua
+++ b/tex/context/base/l-md5.lua
@@ -13,6 +13,6 @@ if md5 then do
if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
- if not md5.dec then function md5.dec(str) return convert(stt,"%03i") end end
+ if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
end end
diff --git a/tex/context/base/l-os.lua b/tex/context/base/l-os.lua
index 0c1d92911..1173a928e 100644
--- a/tex/context/base/l-os.lua
+++ b/tex/context/base/l-os.lua
@@ -25,3 +25,35 @@ end
if not os.setenv then
function os.setenv() return false end
end
+
+if not os.times then
+ -- utime = user time
+ -- stime = system time
+ -- cutime = children user time
+ -- cstime = children system time
+ function os.times()
+ return {
+ utime = os.clock(), -- user
+ stime = 0, -- system
+ cutime = 0, -- children user
+ cstime = 0, -- children system
+ }
+ end
+end
+
+if os.gettimeofday then
+ os.clock = os.gettimeofday
+end
+
+do
+ local startuptime = os.gettimeofday()
+ function os.runtime()
+ return os.gettimeofday() - startuptime
+ end
+end
+
+--~ print(os.gettimeofday()-os.time())
+--~ os.sleep(1.234)
+--~ print (">>",os.runtime())
+--~ print(os.date("%H:%M:%S",os.gettimeofday()))
+--~ print(os.date("%H:%M:%S",os.time()))
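
Hedged usage of the timing additions; os.gettimeofday is a LuaTeX extension, so outside LuaTeX only the os.times fallback above applies:

-- measure a stretch of work with the runtime helper defined above
local t0 = os.runtime()
for i=1,1e6 do end -- some work
print(string.format("elapsed: %.3f s", os.runtime() - t0))
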
diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua
index 14fad31d7..c39a72ec2 100644
--- a/tex/context/base/l-table.lua
+++ b/tex/context/base/l-table.lua
@@ -37,12 +37,17 @@ function table.sortedkeys(tab)
srt[#srt+1] = key
if kind == 3 then
-- no further check
- elseif type(key) == "string" then
- if kind == 2 then kind = 3 else kind = 1 end
- elseif type(key) == "number" then
- if kind == 1 then kind = 3 else kind = 2 end
else
- kind = 3
+ local tkey = type(key)
+ if tkey == "string" then
+ -- if kind == 2 then kind = 3 else kind = 1 end
+ kind = (kind == 2 and 3) or 1
+ elseif tkey == "number" then
+ -- if kind == 1 then kind = 3 else kind = 2 end
+ kind = (kind == 1 and 3) or 2
+ else
+ kind = 3
+ end
end
end
if kind == 0 or kind == 3 then
@@ -65,32 +70,96 @@ function table.prepend(t, list)
end
end
+--~ function table.merge(t, ...)
+--~ for _, list in ipairs({...}) do
+--~ for k,v in pairs(list) do
+--~ t[k] = v
+--~ end
+--~ end
+--~ return t
+--~ end
+
function table.merge(t, ...)
- for _, list in ipairs({...}) do
- for k,v in pairs(list) do
+ local lst = {...}
+ for i=1,#lst do
+ for k, v in pairs(lst[i]) do
t[k] = v
end
end
+ return t
end
+--~ function table.merged(...)
+--~ local tmp = { }
+--~ for _, list in ipairs({...}) do
+--~ for k,v in pairs(list) do
+--~ tmp[k] = v
+--~ end
+--~ end
+--~ return tmp
+--~ end
+
function table.merged(...)
- local tmp = { }
- for _, list in ipairs({...}) do
- for k,v in pairs(list) do
+ local tmp, lst = { }, {...}
+ for i=1,#lst do
+ for k, v in pairs(lst[i]) do
tmp[k] = v
end
end
return tmp
end
-if not table.fastcopy then
+--~ function table.imerge(t, ...)
+--~ for _, list in ipairs({...}) do
+--~ for _, v in ipairs(list) do
+--~ t[#t+1] = v
+--~ end
+--~ end
+--~ return t
+--~ end
- function table.fastcopy(old) -- fast one
+function table.imerge(t, ...)
+ local lst = {...}
+ for i=1,#lst do
+ local nst = lst[i]
+ for j=1,#nst do
+ t[#t+1] = nst[j]
+ end
+ end
+ return t
+end
+
+--~ function table.imerged(...)
+--~ local tmp = { }
+--~ for _, list in ipairs({...}) do
+--~ for _,v in pairs(list) do
+--~ tmp[#tmp+1] = v
+--~ end
+--~ end
+--~ return tmp
+--~ end
+
+function table.imerged(...)
+ local tmp, lst = { }, {...}
+ for i=1,#lst do
+ local nst = lst[i]
+ for j=1,#nst do
+ tmp[#tmp+1] = nst[j]
+ end
+ end
+ return tmp
+end
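+
+-- a quick sketch of the difference between the hashed and the indexed mergers
+-- (results as produced by the functions above):
+
+--~ table.merge ({ a=1 },{ a=2, b=3 }) -- t becomes { a=2, b=3 }, later keys win
+--~ table.imerge({ 1, 2 },{ 3 },{ 4 })  -- t becomes { 1, 2, 3, 4 }, values are appended
+--~ table.merged({ a=1 },{ b=2 })       -- returns a fresh { a=1, b=2 }, arguments untouched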
+
+if not table.fastcopy then do
+
+ local type, pairs, getmetatable, setmetatable = type, pairs, getmetatable, setmetatable
+
+ local function fastcopy(old) -- fast one
if old then
local new = { }
for k,v in pairs(old) do
if type(v) == "table" then
- new[k] = table.fastcopy(v) -- was just table.copy
+ new[k] = fastcopy(v) -- was just table.copy
else
new[k] = v
end
@@ -105,11 +174,15 @@ if not table.fastcopy then
end
end
-end
+ table.fastcopy = fastcopy
+
+end end
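+
+-- beware: fastcopy recurses without bookkeeping, so tables with cycles will blow
+-- up, while the copy variant below tracks already visited tables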
-if not table.copy then
+if not table.copy then do
- function table.copy(t, tables) -- taken from lua wiki, slightly adapted
+ local type, pairs, getmetatable, setmetatable = type, pairs, getmetatable, setmetatable
+
+ local function copy(t, tables) -- taken from lua wiki, slightly adapted
tables = tables or { }
local tcopy = {}
if not tables[t] then
@@ -120,7 +193,7 @@ if not table.copy then
if tables[i] then
i = tables[i]
else
- i = table.copy(i, tables)
+ i = copy(i, tables)
end
end
if type(v) ~= "table" then
@@ -128,7 +201,7 @@ if not table.copy then
elseif tables[v] then
tcopy[i] = tables[v]
else
- tcopy[i] = table.copy(v, tables)
+ tcopy[i] = copy(v, tables)
end
end
local mt = getmetatable(t)
@@ -138,7 +211,9 @@ if not table.copy then
return tcopy
end
-end
+ table.copy = copy
+
+end end
 -- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
@@ -211,7 +286,9 @@ do
end
if n == #t then
local tt = { }
- for _,v in ipairs(t) do
+ -- for _,v in ipairs(t) do
+ for i=1,#t do
+ local v = t[i]
local tv = type(v)
if tv == "number" or tv == "boolean" then
tt[#tt+1] = tostring(v)
@@ -240,15 +317,16 @@ do
end
else
depth = ""
- if type(name) == "string" then
+ local tname = type(name)
+ if tname == "string" then
if name == "return" then
handle("return {")
else
handle(name .. "={")
end
- elseif type(name) == "number" then
+ elseif tname == "number" then
handle("[" .. name .. "]={")
- elseif type(name) == "boolean" then
+ elseif tname == "boolean" then
if name then
handle("return {")
else
@@ -263,7 +341,7 @@ do
local inline = compact and table.serialize_inline
local first, last = nil, 0 -- #root cannot be trusted here
if compact then
- for k,v in ipairs(root) do
+ for k,v in ipairs(root) do -- NOT: for k=1,#root do
if not first then first = k end
last = last + 1
end
diff --git a/tex/context/base/l-url.lua b/tex/context/base/l-url.lua
new file mode 100644
index 000000000..2e0907eb7
--- /dev/null
+++ b/tex/context/base/l-url.lua
@@ -0,0 +1,77 @@
+-- filename : l-url.lua
+-- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
+-- copyright: PRAGMA ADE / ConTeXt Development Team
+-- license : see context related readme files
+
+if not versions then versions = { } end versions['l-url'] = 1.001
+if not url then url = { } end
+
+-- from the spec (on the web):
+--
+-- foo://example.com:8042/over/there?name=ferret#nose
+-- \_/ \______________/\_________/ \_________/ \__/
+-- | | | | |
+-- scheme authority path query fragment
+-- | _____________________|__
+-- / \ / \
+-- urn:example:animal:ferret:nose
+
+do
+
+ local function tochar(s)
+ return string.char(tonumber(s,16))
+ end
+
+ local colon, qmark, hash, slash, percent, endofstring = lpeg.P(":"), lpeg.P("?"), lpeg.P("#"), lpeg.P("/"), lpeg.P("%"), lpeg.P(-1)
+
+ local hexdigit = lpeg.R("09","AF","af")
+ local escaped = percent * lpeg.C(hexdigit * hexdigit) / tochar
+
+ local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^0) * colon + lpeg.Cc("")
+ local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("")
+ local path = lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("")
+ local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("")
+ local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0) + lpeg.Cc("")
+
+ local parser = lpeg.Ct(scheme * authority * path * query * fragment)
+
+ function url.split(str)
+ return (type(str) == "string" and parser:match(str)) or str
+ end
+
+end
+
+function url.hashed(str)
+ str = url.split(str)
+ return { scheme = str[1], authority = str[2], path = str[3], query = str[4], fragment = str[5] }
+end
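+
+-- a sketch of what the hashed accessor returns (the five fields follow the five
+-- captures of the parser above):
+
+--~ url.hashed("http://www.ietf.org/rfc/rfc2396.txt")
+--~ -- { scheme="http", authority="www.ietf.org", path="/rfc/rfc2396.txt", query="", fragment="" }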
+
+function url.filename(filename)
+ local t = url.hashed(filename)
+ return (t.scheme == "file" and t.path:gsub("^/([a-zA-Z]:/)","%1")) or filename
+end
+
+--~ print(url.filename("file:///c:/oeps.txt"))
+--~ print(url.filename("c:/oeps.txt"))
+--~ print(url.filename("file:///oeps.txt"))
+--~ print(url.filename("/oeps.txt"))
+
+-- from the spec on the web (sort of):
+--~
+--~ function test(str)
+--~ print(table.serialize(url.hashed(str)))
+--~ -- print(table.serialize(url.split(str)))
+--~ end
+--~
+--~ test("%56pass%20words")
+--~ test("file:///c:/oeps.txt")
+--~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt")
+--~ test("http://www.ietf.org/rfc/rfc2396.txt")
+--~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what")
+--~ test("mailto:John.Doe@example.com")
+--~ test("news:comp.infosystems.www.servers.unix")
+--~ test("tel:+1-816-555-1212")
+--~ test("telnet://192.0.2.16:80/")
+--~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2")
+--~ test("/etc/passwords")
+--~ test("http://www.pragma-ade.com/spaced%20name")
diff --git a/tex/context/base/l-xml.lua b/tex/context/base/l-xml.lua
index 9236411f7..a15e3e81b 100644
--- a/tex/context/base/l-xml.lua
+++ b/tex/context/base/l-xml.lua
@@ -51,7 +51,8 @@ xml.xmlns = { }
do
- local parser = lpeg.P(false) -- printing shows that this has no side effects
+ local check = lpeg.P(false)
+ local parse = check
--[[ldx--
<p>The next function associates a namespace prefix with an <l n='url'/>. This
@@ -63,7 +64,8 @@ do
--ldx]]--
function xml.registerns(namespace, pattern) -- pattern can be an lpeg
- parser = parser + lpeg.C(lpeg.P(pattern:lower())) / namespace
+ check = check + lpeg.C(lpeg.P(pattern:lower())) / namespace
+ parse = lpeg.P { lpeg.P(check) + 1 * lpeg.V(1) }
end
--[[ldx--
@@ -77,7 +79,7 @@ do
--ldx]]--
function xml.checkns(namespace,url)
- local ns = parser:match(url:lower())
+ local ns = parse:match(url:lower())
if ns and namespace ~= ns then
xml.xmlns[namespace] = ns
end
@@ -95,7 +97,7 @@ do
--ldx]]--
function xml.resolvens(url)
- return parser:match(url:lower()) or ""
+ return parse:match(url:lower()) or ""
end
--[[ldx--
@@ -146,11 +148,15 @@ do
local mt = { __tostring = xml.text }
+ function xml.check_error(top,toclose)
+ return ""
+ end
+
local function add_attribute(namespace,tag,value)
if tag == "xmlns" then
xmlns[#xmlns+1] = xml.resolvens(value)
at[tag] = value
- elseif ns == "xmlns" then
+ elseif namespace == "xmlns" then
xml.checkns(tag,value)
at["xmlns:" .. tag] = value
else
@@ -162,7 +168,7 @@ do
dt[#dt+1] = spacing
end
local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
- top = { ns=namespace or "", nr=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
+ top = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
setmetatable(top, mt)
dt = top.dt
stack[#stack+1] = top
@@ -175,9 +181,9 @@ do
local toclose = remove(stack)
top = stack[#stack]
if #stack < 1 then
- errorstr = string.format("nothing to close with %s", tag)
+ errorstr = string.format("nothing to close with %s %s", tag, xml.check_error(top,toclose) or "")
elseif toclose.tg ~= tag then -- no namespace check
- errorstr = string.format("unable to close %s with %s", toclose.tg, tag)
+ errorstr = string.format("unable to close %s with %s %s", toclose.tg, tag, xml.check_error(top,toclose) or "")
end
dt = top.dt
dt[#dt+1] = toclose
@@ -193,7 +199,7 @@ do
top = stack[#stack]
setmetatable(top, mt)
dt = top.dt
- dt[#dt+1] = { ns=namespace or "", nr=resolved, tg=tag, at=at, dt={}, __p__ = top }
+ dt[#dt+1] = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top }
at = { }
if at.xmlns then
remove(xmlns)
@@ -282,14 +288,13 @@ do
-- text + comment + emptyelement + cdata + instruction + lpeg.V("parent"), -- 5.8
-- text + lpeg.V("parent") + emptyelement + comment + cdata + instruction, -- 5.5
-
local grammar = lpeg.P { "preamble",
preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * lpeg.V("parent") * trailer,
parent = beginelement * lpeg.V("children")^0 * endelement,
children = text + lpeg.V("parent") + emptyelement + comment + cdata + instruction,
}
- function xml.convert(data, no_root) -- no collapse any more
+ function xml.convert(data, no_root)
stack, top, at, xmlns, errorstr, result = {}, {}, {}, {}, nil, nil
stack[#stack+1] = top
top.dt = { }
@@ -300,7 +305,7 @@ do
errorstr = "invalid xml file"
end
if errorstr then
- result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={} } } }
+ result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={}, er = true } }, error = true }
setmetatable(stack, mt)
if xml.error_handler then xml.error_handler("load",errorstr) end
else
@@ -324,6 +329,10 @@ do
function. Maybe it will go away (when not used).</p>
--ldx]]--
+ function xml.is_valid(root)
+ return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er
+ end
+
function xml.package(tag,attributes,data)
local ns, tg = tag:match("^(.-):?([^:]+)$")
local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} }
@@ -331,6 +340,10 @@ do
return t
end
+ function xml.is_valid(root)
+ return root and not root.error
+ end
+
xml.error_handler = (logs and logs.report) or print
end
@@ -343,16 +356,18 @@ a filename or a file handle.</p>
function xml.load(filename)
if type(filename) == "string" then
- local root, f = { }, io.open(filename,'r')
+ local f = io.open(filename,'r')
if f then
- root = xml.convert(f:read("*all"))
+ local root = xml.convert(f:read("*all"))
f:close()
+ return root
else
- -- if we want an error: root = xml.convert("")
+ return xml.convert("")
end
- return root -- no nil but an empty table if it fails
- else
+ elseif filename then -- filehandle
return xml.convert(filename:read("*all"))
+ else
+ return xml.convert("")
end
end
@@ -494,10 +509,10 @@ do
else
if ats then
-- handle(format("<%s:%s %s/>",ens,etg,table.concat(ats," ")))
- handle("<%" .. ens .. ":" .. etg .. table.concat(ats," ") .. "/>")
+ handle("<" .. ens .. ":" .. etg .. " " .. table.concat(ats," ") .. "/>")
else
-- handle(format("<%s:%s/>",ens,etg))
- handle("<%" .. ens .. ":" .. "/>")
+ handle("<" .. ens .. ":" .. etg .. "/>")
end
end
else
@@ -706,6 +721,8 @@ do
str = str:gsub("@([a-zA-Z%-_]+)", "(a['%1'] or '')")
str = str:gsub("position%(%)", "i")
str = str:gsub("text%(%)", "t")
+ str = str:gsub("!=", "~=")
+ str = str:gsub("([^=!~<>])=([^=!~<>])", "%1==%2")
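+ -- so, hypothetically, an expression like @n='03' has at this point become
+ -- (a['n'] or '')=='03', which is valid lua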
str = str:gsub("([a-zA-Z%-_]+)%(", "functions.%1(")
return str, loadstring(string.format("return function(functions,i,a,t) return %s end", str))()
end
@@ -730,7 +747,7 @@ do
local bar = lpeg.P('|')
local hat = lpeg.P('^')
local valid = lpeg.R('az', 'AZ', '09') + lpeg.S('_-')
- local name_yes = lpeg.C(valid^1) * colon * lpeg.C(valid^1)
+ local name_yes = lpeg.C(valid^1) * colon * lpeg.C(valid^1 + star) -- permits ns:*
local name_nop = lpeg.C(lpeg.P(true)) * lpeg.C(valid^1)
local name = name_yes + name_nop
local number = lpeg.C((lpeg.S('+-')^0 * lpeg.R('09')^1)) / tonumber
@@ -851,8 +868,10 @@ do
-- root
return false
end
- elseif #map == 2 and m == 12 and map[2][1] == 20 then
- return { { 29, map[2][2], map[2][3] } }
+ elseif #map == 2 and m == 12 and map[2][1] == 20 then
+ -- return { { 29, map[2][2], map[2][3], map[2][4], map[2][5] } }
+ map[2][1] = 29
+ return { map[2] }
end
if m ~= 11 and m ~= 12 and m ~= 13 and m ~= 14 and m ~= 15 and m ~= 16 then
table.insert(map, 1, { 16 })
@@ -987,8 +1006,10 @@ do
local rootdt = root.dt
for k=1,#rootdt do
local e = rootdt[k]
- local ns, tg = e.rn or e.ns, e.tg
- if ns == action[2] and tg == action[3] then
+ local ns, tg = (e.rn or e.ns), e.tg
+ local matched = ns == action[3] and tg == action[4]
+ if not action[2] then matched = not matched end
+ if matched then
if handle(root,rootdt,k) then return false end
end
end
@@ -1001,7 +1022,8 @@ do
end
else
if (command == 16 or command == 12) and index == 1 then -- initial
- wildcard = true
+--~ wildcard = true
+ wildcard = command == 16 -- ok?
index = index + 1
action = pattern[index]
command = action and action[1] or 0 -- something is wrong
@@ -1032,7 +1054,8 @@ do
if tg then
idx = idx + 1
if command == 30 then
- local matched = ns == action[3] and tg == action[4]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
if matched then
n = n + dn
@@ -1050,20 +1073,23 @@ do
else
local matched, multiple = false, false
if command == 20 then -- match
- matched = ns == action[2] and tg == action[3]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
elseif command == 21 then -- match one of
multiple = true
- for i=2,#action,2 do
+ for i=3,#action,2 do
if ns == action[i] and tg == action[i+1] then matched = true break end
end
if not action[2] then matched = not matched end
elseif command == 22 then -- eq
- matched = ns == action[3] and tg == action[4]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
matched = matched and e.at[action[6]] == action[7]
elseif command == 23 then -- ne
- matched = ns == action[3] and tg == action[4]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
 matched = matched and e.at[action[6]] ~= action[7]
elseif command == 24 then -- one of eq
@@ -1081,18 +1107,20 @@ do
if not action[2] then matched = not matched end
matched = matched and e.at[action[#action-1]] ~= action[#action]
elseif command == 27 then -- has attribute
- local ans = action[3]
- matched = ns == action[3] and tg == action[4]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
matched = matched and e.at[action[5]]
elseif command == 28 then -- has value
local edt = e.dt
- matched = ns == action[3] and tg == action[4]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
matched = matched and edt and edt[1] == action[5]
elseif command == 31 then
local edt = e.dt
- matched = ns == action[3] and tg == action[4]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
if matched then
matched = action[6](functions,idx,e.at,edt[1])
@@ -1537,28 +1565,33 @@ do
end
end
- function xml.include(xmldata,element,attribute,pathlist,collapse)
- element = element or 'ctx:include'
- attribute = attribute or 'name'
- pathlist = pathlist or { '.' }
- -- todo, check op ri
+ function xml.include(xmldata,pattern,attribute,recursive,findfile)
+ -- parse="text" (default: xml), encoding="" (todo)
+ pattern = pattern or 'include'
+ attribute = attribute or 'href'
local function include(r,d,k)
- local ek = d[k]
- local name = (ek.at and ek.at[attribute]) or ""
- if name ~= "" then
- -- maybe file lookup in tree
- local fullname
- for _, path in ipairs(pathlist) do
- if path == '.' then
- fullname = name
- else
- fullname = file.join(path,name)
- end
- local f = io.open(fullname)
+ local ek, name = d[k], nil
+ if ek.at then
+ for a in attribute:gmatch("([^|]+)") do
+ name = ek.at[a]
+ if name then break end
+ end
+ end
+ if name then
+ name = (findfile and findfile(name)) or name
+ if name ~= "" then
+ local f = io.open(name)
if f then
- xml.assign(d,k,xml.load(f,collapse))
+ if ek.at["parse"] == "text" then -- for the moment hard coded
+ d[k] = xml.escaped(f:read("*all"))
+ else
+ local xi = xml.load(f)
+ if recursive then
+ xml.include(xi,pattern,attribute,recursive,findfile)
+ end
+ xml.assign(d,k,xi)
+ end
f:close()
- break
else
xml.empty(d,k)
end
@@ -1567,7 +1600,7 @@ do
xml.empty(d,k)
end
end
- while xml.each_element(xmldata, element, include) do end
+ xml.each_element(xmldata, pattern, include)
end
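+
+ -- hypothetical usage, resolving <include href="some.xml"/> elements in place,
+ -- optionally recursing and passing names through a finder:
+ --
+ --~ xml.include(root,"include","href|name",true,function(name) return input.find_file(texmf.instance,name) end)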
function xml.strip_whitespace(root, pattern)
@@ -1635,6 +1668,20 @@ do
end)
end
+ function xml.filters.found(root,pattern,check_content)
+ local found = false
+ traverse(root, lpath(pattern), function(r,d,k)
+ if check_content then
+ local dk = d and d[k]
+ found = dk and dk.dt and next(dk.dt) and true
+ else
+ found = true
+ end
+ return true
+ end)
+ return found
+ end
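+
+ -- hypothetical usage: found only reports presence (optionally with non empty
+ -- content), it does not collect matches
+ --
+ --~ if xml.filters.found(root,"chapter/title",true) then ... end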
+
end
--[[ldx--
@@ -1648,6 +1695,7 @@ xml.index = xml.filters.index
xml.position = xml.filters.index
xml.first = xml.filters.first
xml.last = xml.filters.last
+xml.found = xml.filters.found
xml.each = xml.each_element
xml.process = xml.process_element
@@ -1696,12 +1744,46 @@ function xml.serialize_path(root,lpath,handle)
xml.serialize(dk,handle)
end
-xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
-xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end
+--~ xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
+--~ xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end
+
+--~ function xml.escaped (str) return str:gsub("(.)" , xml.escapes ) end
+--~ function xml.unescaped(str) return str:gsub("(&.-;)", xml.unescapes) end
+--~ function xml.cleansed (str) return str:gsub("<.->" , '' ) end -- "%b<>"
+
+do
+
+ -- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg
+ --
+ -- 1021:0335:0287:0247
+
+ -- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ"
+ --
+ -- 1559:0257:0288:0190 (last one suggested by roberto)
+
+ -- escaped = lpeg.Cs((lpeg.S("<&>") / xml.escapes + 1)^0)
+ -- escaped = lpeg.Cs((lpeg.S("<")/"&lt;" + lpeg.S(">")/"&gt;" + lpeg.S("&")/"&amp;" + 1)^0)
+ local normal = (1 - lpeg.S("<&>"))^0
+ local special = lpeg.P("<")/"&lt;" + lpeg.P(">")/"&gt;" + lpeg.P("&")/"&amp;"
+ local escaped = lpeg.Cs(normal * (special * normal)^0)
+
+ -- 100 * 1000 * "oeps&lt; oeps&gt; oeps&amp;" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto)
+
+ -- unescaped = lpeg.Cs((lpeg.S("&lt;")/"<" + lpeg.S("&gt;")/">" + lpeg.S("&amp;")/"&" + 1)^0)
+ -- unescaped = lpeg.Cs((((lpeg.P("&")/"") * (lpeg.P("lt")/"<" + lpeg.P("gt")/">" + lpeg.P("amp")/"&") * (lpeg.P(";")/"")) + 1)^0)
+ local normal = (1 - lpeg.S"&")^0
+ local special = lpeg.P("&lt;")/"<" + lpeg.P("&gt;")/">" + lpeg.P("&amp;")/"&"
+ local unescaped = lpeg.Cs(normal * (special * normal)^0)
-function xml.escaped (str) return str:gsub("(.)" , xml.escapes ) end
-function xml.unescaped(str) return str:gsub("(&.-;)", xml.unescapes) end
-function xml.cleansed (str) return str:gsub("<.->" , '' ) end -- "%b<>"
+ -- 100 * 5000 * "oeps <oeps bla='oeps' foo='bar'> oeps </oeps> oeps " : gsub:lpeg == 623:501 msec (short tags, less difference)
+
+ local cleansed = lpeg.Cs(((lpeg.P("<") * (1-lpeg.P(">"))^0 * lpeg.P(">"))/"" + 1)^0)
+
+ function xml.escaped (str) return escaped :match(str) end
+ function xml.unescaped(str) return unescaped:match(str) end
+ function xml.cleansed (str) return cleansed :match(str) end
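+
+ -- a few checks of what these lpeg based helpers produce (same results as the
+ -- old gsub variants, just faster on longer strings):
+
+ --~ xml.escaped ("x < y & z")        -- x &lt; y &amp; z
+ --~ xml.unescaped("x &lt; y")        -- x < y
+ --~ xml.cleansed ("<b>bold</b> text") -- bold text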
+
+end
function xml.join(t,separator,lastseparator)
if #t > 0 then
@@ -1806,3 +1888,10 @@ end end
--~ xml.xshow(xml.first(x,"b[@n=='03' or @n=='08']"))
--~ xml.xshow(xml.all (x,"b[number(@n)>2 and number(@n)<6]"))
--~ xml.xshow(xml.first(x,"b[find(text(),'ALSO')]"))
+
+--~ str = [[
+--~ <?xml version="1.0" encoding="utf-8"?>
+--~ <story line='mojca'>
+--~ <windows>my secret</mouse>
+--~ </story>
+--~ ]]
diff --git a/tex/context/base/lang-ini.lua b/tex/context/base/lang-ini.lua
index fad3b22ec..f5091fbb2 100644
--- a/tex/context/base/lang-ini.lua
+++ b/tex/context/base/lang-ini.lua
@@ -1,3 +1,8 @@
+
+--~ lang:hyphenation(string)
+--~ string =lang:hyphenation()
+--~ lang:clear_hyphenation()
+
if not modules then modules = { } end modules ['lang-ini'] = {
version = 1.001,
comment = "companion to lang-ini.tex",
@@ -6,19 +11,46 @@ if not modules then modules = { } end modules ['lang-ini'] = {
license = "see context related readme files"
}
+if lang.use_new then lang.use_new(true) end
+
languages = languages or {}
languages.version = 1.009
-
-languages.hyphenation = languages.hyphenation or {}
-languages.hyphenation.data = languages.hyphenation.data or { }
+languages.hyphenation = languages.hyphenation or { }
+languages.hyphenation.data = languages.hyphenation.data or { }
do
-- we can consider hiding data (faster access too)
- local function filter(filename,what)
- local data = io.loaddata(input.find_file(texmf.instance,filename))
- local start, stop = data:find(string.format("\\%s%%s*(%%b{})",what or "patterns"))
- return (start and stop and data:sub(start+1,stop-1)) or ""
+ --~ local function filter(filename,what)
+ --~ local data = io.loaddata(input.find_file(texmf.instance,filename))
+ --~ local data = data:match(string.format("\\%s%%s*(%%b{})",what or "patterns"))
+ --~ return data:match("{%s*(.-)%s*}") or ""
+ --~ end
+
+ -- loading the 26 languages that we normally load in mkiv, the string based variant
+ -- takes .84 seconds (probably due to the sub's) while the lpeg variant takes .78
+ -- seconds
+
+ local leftbrace = lpeg.P("{")
+ local rightbrace = lpeg.P("}")
+ local spaces = lpeg.S(" \r\n\t\f")
+ local spacing = spaces^0
+ local validchar = 1-(spaces+rightbrace+leftbrace)
+ local validword = validchar^1
+ local content = spacing * leftbrace * spacing * lpeg.C((spacing * validword)^0) * spacing * rightbrace * lpeg.P(true)
+
+ local command = lpeg.P("\\patterns")
+ local parser = (1-command)^0 * command * content
+
+ local function filterpatterns(filename)
+ return parser:match(io.loaddata(input.find_file(texmf.instance,filename)) or "")
+ end
+
+ local command = lpeg.P("\\hyphenation")
+ local parser = (1-command)^0 * command * content
+
+ local function filterexceptions(filename)
+ return parser:match(io.loaddata(input.find_file(texmf.instance,filename)) or "")
end
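+
+ -- so, given a file containing \patterns{.ab4 a5bc ...} the parsers above return
+ -- the words between the braces as one string, with the surrounding (but not the
+ -- internal) spacing stripped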
local function record(tag)
@@ -32,40 +64,46 @@ do
languages.hyphenation.record = record
- function languages.hyphenation.number(tag)
+ function languages.hyphenation.define(tag)
local data = record(tag)
return data:id()
end
- function languages.hyphenation.load(tag, patterns, exceptions)
+ function languages.hyphenation.number(tag)
+ local d = languages.hyphenation.data[tag]
+ return (d and d:id()) or 0
+ end
+
+ function languages.hyphenation.load(tag, filename, filter, target)
input.starttiming(languages)
local data = record(tag)
- patterns = (patterns and input.find_file(texmf.instance,patterns )) or ""
- exceptions = (exceptions and input.find_file(texmf.instance,exceptions)) or ""
- if patterns ~= "" then
- data:patterns(filter(patterns,"patterns"))
- end
- if exceptions ~= "" then
- data:exceptions(string.split(filter(exceptions,"hyphenation"),"%s+"))
- -- local t = { }
- -- for s in string.gmatch(filter(exceptions,"hyphenation"), "(%S+)") do
- -- t[#t+1] = s
- -- end
- -- print(tag,#t)
- -- data:exceptions(t)
+ filename = (filename and filename ~= "" and input.find_file(texmf.instance,filename)) or ""
+ local ok = filename ~= ""
+ if ok then
+ lang[target](data,filter(filename))
+ else
+ lang[target](data,"")
end
languages.hyphenation.data[tag] = data
input.stoptiming(languages)
+ return ok
+ end
+
+ function languages.hyphenation.loadpatterns(tag, patterns)
+ return languages.hyphenation.load(tag, patterns, filterpatterns, "patterns")
+ end
+
+ function languages.hyphenation.loadexceptions(tag, exceptions)
+ return languages.hyphenation.load(tag, exceptions, filterexceptions, "hyphenation")
end
function languages.hyphenation.exceptions(tag, ...)
local data = record(tag)
- data:exceptions(...)
+ data:hyphenation(...)
end
function languages.hyphenation.hyphenate(tag, str)
- local data = record(tag)
- return data:hyphenate(str)
+ return lang.hyphenate(record(tag), str)
end
function languages.hyphenation.lefthyphenmin(tag, value)
@@ -79,250 +117,231 @@ do
return data:righthyphenmin()
end
- function languages.n()
+ function languages.hyphenation.n()
return table.count(languages.hyphenation.data)
end
end
--- beware, the collowing code has to be adapted, and was used in
--- experiments with loading lists of words; if we keep supporting
--- this, i will add a namespace; this will happen when the hyphenation
--- code is in place
-
-languages.dictionary = languages.dictionary or {}
-languages.dictionary.data = languages.dictionary.data or { }
-languages.dictionary.template = "words-%s.txt"
-languages.dictionary.patterns = languages.dictionary.patterns or { }
-
--- maybe not in dictionary namespace
-
-languages.dictionary.current = nil
-languages.dictionary.number = nil
-languages.dictionary.attribute = nil
-
-function languages.dictionary.set(attribute,number,name)
- if not languages.dictionary.patterns[number] then
- input.start_timing(languages)
- local fullname = string.format(languages.dictionary.template,name)
- local foundname = input.find_file(texmf.instance,fullname,'other text file')
- if foundname and foundname ~= "" then
- -- texio.write_nl(string.format("loading patterns for language %s as %s from %s",name,number,foundname))
- languages.dictionary.patterns[number] = tex.load_dict(foundname) or { }
+do
+
+ -- we can speed this one up with locals if needed
+
+ local function tolang(what)
+ if type(what) == "number" then
+ return languages.hyphenation.data[languages.numbers[what]]
+ elseif type(what) == "string" then
+ return languages.hyphenation.data[what]
else
- languages.dictionary.patterns[number] = { }
+ return what
end
- input.stop_timing(languages)
end
- languages.dictionary.attribute = attribute
- languages.dictionary.number = number
- languages.dictionary.current = languages.dictionary.patterns[number]
+
+ function languages.prehyphenchar(what)
+ return lang.prehyphenchar(tolang(what))
+ end
+ function languages.posthyphenchar(what)
+ return lang.posthyphenchar(tolang(what))
+ end
+
+ languages.tolang = tolang
+
end
-function languages.dictionary.add(word,pattern)
- if languages.dictionary.current and word and pattern then
- languages.dictionary.current[word] = pattern
+languages.registered = languages.registered or { }
+languages.associated = languages.associated or { }
+languages.numbers = languages.numbers or { }
+
+input.storage.register(false,"languages/registered",languages.registered,"languages.registered")
+input.storage.register(false,"languages/associated",languages.associated,"languages.associated")
+
+function languages.register(tag,parent,patterns,exceptions)
+ parent = parent or tag
+ languages.registered[tag] = {
+ parent = parent,
+ patterns = patterns or string.format("lang-%s.pat",parent),
+ exceptions = exceptions or string.format("lang-%s.hyp",parent),
+ loaded = false,
+ number = 0,
+ }
+end
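+
+-- so, hypothetically, a plain call like the next one registers dutch under the
+-- default filenames lang-nl.pat and lang-nl.hyp; actual loading is delayed till
+-- languages.enable is called
+
+--~ languages.register('nl')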
+
+function languages.associate(tag,script,language)
+ languages.associated[tag] = { script, language }
+end
+
+function languages.association(tag)
+ if type(tag) == "number" then
+ tag = languages.numbers[tag]
+ end
+ local lat = tag and languages.associated[tag]
+ if lat then
+ return lat[1], lat[2]
+ else
+ return nil, nil
end
end
-function languages.dictionary.remove(word)
- if languages.dictionary.current and word then
- languages.dictionary.current[word] = nil
+function languages.loadable(tag)
+ local l = languages.registered[tag]
+ if l and l.patterns and input.find_file(texmf.instance,l.patterns) then
+ return true
+ else
+ return false
end
end
-function languages.dictionary.hyphenate(str)
- if languages.dictionary.current then
- local result = languages.dictionary.current[str]
- if result then
- return result
- else
- -- todo: be clever
+languages.share = false -- we don't share language numbers
+
+function languages.enable(tags)
+ -- beware: we cannot set tex.language, but need tex.normallanguage
+ for i=1,#tags do
+ local tag = tags[i]
+ local l = languages.registered[tag]
+ if l then
+ if not l.loaded then
+ local tag = l.parent
+ local number = languages.hyphenation.number(tag)
+ if languages.share and number > 0 then
+ l.number = number
+ else
+ -- we assume the same filenames
+ l.number = languages.hyphenation.define(tag)
+ languages.hyphenation.loadpatterns(tag,l.patterns)
+ languages.hyphenation.loadexceptions(tag,l.exceptions)
+ languages.numbers[l.number] = tag
+ end
+ l.loaded = true
+ end
+ if l.number > 0 then
+ return l.number
+ end
end
end
- return str
+ return 0
end
-function languages.dictionary.found(number, str)
- local patterns = languages.dictionary.patterns[number]
- return patterns and patterns[str]
+-- e['implementer']= 'imple{m}{-}{-}menter'
+-- e['manual'] = 'man{}{}{}'
+-- e['as'] = 'a-s'
+-- e['user-friendly'] = 'user=friend-ly'
+-- e['exceptionally-friendly'] = 'excep-tionally=friend-ly'
+
+function languages.hyphenation.loadwords(tag, filename)
+ local id = languages.hyphenation.number(tag)
+ if id > 0 then
+ local l = lang.new(id)
+ input.starttiming(languages)
+ local data = io.loaddata(filename) or ""
+ l:hyphenation(data)
+ input.stoptiming(languages)
+ end
end
-do
+languages.hyphenation.define ("zerolanguage")
+languages.hyphenation.loadpatterns ("zerolanguage") -- else bug
+languages.hyphenation.loadexceptions("zerolanguage") -- else bug
- local discnode = node.new('disc')
+languages.logger = languages.logger or { }
- discnode.pre = node.new('glyph')
- discnode.pre.subtype = 0
- discnode.pre.char = 45 -- will be configurable
- discnode.pre.font = 0
+function languages.logger.report()
+ local result = {}
+ for _, tag in ipairs(table.sortedkeys(languages.registered)) do
+ local l = languages.registered[tag]
+ if l.loaded then
+ local p = (l.patterns and "pat") or '-'
+ local e = (l.exceptions and "exc") or '-'
+ result[#result+1] = string.format("%s:%s:%s:%s:%s", tag, l.parent, p, e, l.number)
+ end
+ end
+ return (#result > 0 and table.concat(result," ")) or "none"
+end
- local glyph, disc, kern = node.id('glyph'), node.id('disc'), node.id('kern')
- local bynode = node.traverse
- local bychar = string.utfcharacters
+languages.words = languages.words or {}
+languages.words.data = languages.words.data or {}
+languages.words.enable = false
+languages.words.threshold = 4
- local function reconstruct(prev,str,fnt)
- local done = false
- if #str < 4 then
- -- too short
- else
- local wrd = languages.dictionary.hyphenate(str)
- if wrd == str then
- -- not found
- else
- local pre, post, after, comp = nil, nil, false, nil
- for chr in bychar(wrd) do
- if prev then
- if not comp and prev.next and prev.next.subtype > 0 then
- comp = prev.next.components
- pre = node.copy(comp)
- comp = comp.next
- post, after = nil, false
- elseif chr == '-' then
- if not comp then
- done = true
- local n = node.copy(discnode)
- n.pre.font = fnt.font
- n.pre.attr = fnt.attr
- if pre then
- pre.next = n.pre
- n.pre = pre
- pre, pos, after = nil, nil, false
- end
- n.next = prev.next
- prev.next = n
- prev = n
- else
- after = true
- end
- elseif comp then
- local g = node.copy(comp)
- comp = comp.next
- if after then
- if post then post.next = g else post = g end
- else
- if pre then pre.next = g else pre = g end
- end
- if not comp then
- done = true
- local n = node.copy(discnode)
- n.pre.font = fnt.font
- n.pre.attr = fnt.attr
- pre.next = n.pre
- n.pre = pre
- n.post = post
- n.replace = 1
- n.next = prev.next
- prev.next = n
- prev = n
- pre, pos, after = nil, nil, false
- prev = prev.next -- hm, now we get error 1
- end
- else
- prev = prev.next
- end
- else
- -- print("ERROR 1")
- end
- end
- end
+languages.words.colors = {
+ ["known"] = "green",
+ ["unknown"] = "red",
+}
+
+do
+
+ local spacing = lpeg.S(" \n\r\t")
+ local markup = lpeg.S("-=")
+ local lbrace = lpeg.P("{")
+ local rbrace = lpeg.P("}")
+ local disc = (lbrace * (1-rbrace)^0 * rbrace)^1 -- or just 3 times, time this
+ local word = lpeg.Cs((markup/"" + disc/"" + (1-spacing))^1)
+
+ function languages.words.load(tag, filename)
+ local filename = input.find_file(texmf.instance,filename,'other text file') or ""
+ if filename ~= "" then
+ input.starttiming(languages)
+ local data = io.loaddata(filename) or ""
+ local words = languages.words.data[tag] or {}
+ local parser = (spacing + word/function(s) words[s] = true end)^0
+ parser:match(data)
+ languages.words.data[tag] = words
+ input.stoptiming(languages)
end
- return done
end
- function nodes.hyphenate_words(head) -- we forget about the very first, no head stuff here
- local cd = characters.data
- local uc = utf.char
- local n, p = head, nil
- local done, prev, str, fnt, lan = false, false, "", nil, nil
- local currentlanguage = languages.dictionary.current
- local att, patterns = languages.dictionary.attribute, languages.dictionary.patterns
- local function action() -- maybe inline
- if reconstruct(prev,str,fnt) then
- done = true
- end
- str, prev = "", false
- end
- while n do
- local id = n.id
- if id == glyph then
- local l = node.has_attribute(n,att)
- if l then
- if l ~= lan then
- if prev then action() end
- lan = l
- languages.dictionary.current = patterns[lan]
- end
- elseif prev then
- action()
- end
- if not languages.dictionary.current then
- -- skip
- elseif n.subtype > 0 then
- if not prev then
- prev, fnt = p, n
- end
- for g in bynode(n.components) do
- str = str .. uc(g.char)
- end
- else
- local code = n.char
- if cd[code].lccode then
- if not prev then
- prev, fnt = p, n
- end
- str = str .. uc(code)
- elseif prev then
- action()
- end
- end
- elseif id == kern and n.subtype == 0 and p then
- p.next = n.next
- node.free(p,n)
- n = p
- elseif prev then
- action()
- end
- p = n
- n = n.next
- end
- if prev then
- action()
- end
- languages.dictionary.current = currentlanguage
- return head
+end
+
+function languages.words.found(id, str)
+ local tag = languages.numbers[id]
+ if tag then
+ local data = languages.words.data[tag]
+ return data and (data[str] or data[str:lower()])
+ else
+ return false
end
+end
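+
+-- a minimal sketch of feeding the checker; the word file is just a plain list of
+-- words (the filename is only an example) and found expects the language number
+-- that was associated with the tag when enabling
+
+--~ languages.words.load("en","words-en.txt")
+--~ languages.words.enable = true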
+
+-- The following code is an adaptation of experimental code for
+-- hyphenating and spell checking.
+
+do
- function nodes.mark_words(head,attribute,found)
+ local glyph, disc, kern = node.id('glyph'), node.id('disc'), node.id('kern')
+
+ local bynode = node.traverse
+ local bychar = string.utfcharacters
+
+ function mark_words(head,found) -- can be optimized
local cd = characters.data
local uc = utf.char
- local current, start, str, att, n = head, nil, "", nil, 0
+ local current, start, str, language, n = head, nil, "", nil, 0
local function action()
- local f = found(att,str)
- if f then
- for i=1,n do
- f(start)
- start = start.next
+ if #str > 0 then
+ local f = found(language,str)
+ if f then
+ for i=1,n do
+ f(start)
+ start = start.next
+ end
end
end
str, start, n = "", nil, 0
end
- local has_attribute = node.has_attribute
while current do
local id = current.id
if id == glyph then
- local a = has_attribute(current,attribute)
+ local a = current.lang
if a then
- if a ~= att then
+ if a ~= language then
if start then
action()
end
- att = a
+ language = a
end
elseif start then
action()
- att = a
+ language = a
end
if current.subtype > 0 then
start = start or current
@@ -332,7 +351,7 @@ do
end
else
local code = current.char
- if cd[code].lccode then
+ if cd[code].uccode or cd[code].lccode then
start = start or current
n = n + 1
str = str .. uc(code)
@@ -357,34 +376,65 @@ do
return head
end
- function languages.dictionary.check(head, attribute, yes, nop)
+ languages.words.methods = { }
+ languages.words.method = 1
+
+ languages.words.methods[1] = function(head, attribute, yes, nop)
local set = node.set_attribute
local unset = node.unset_attribute
local wrong, right = false, false
if nop then wrong = function(n) set(n,attribute,nop) end end
if yes then right = function(n) set(n,attribute,yes) end end
for n in node.traverse(head) do
- unset(n,attribute)
+ unset(n,attribute) -- hm
end
- local found = languages.dictionary.found
- nodes.mark_words(head, languages.dictionary.attribute, function(att,str)
- if #str < 4 then
+ local found, done = languages.words.found, false
+ mark_words(head, function(language,str)
+ if #str < languages.words.threshold then
return false
- elseif found(att,str) then
+ elseif found(language,str) then
+ done = true
return right
else
+ done = true
return wrong
end
end)
- nodes.hyphenate_words(head)
- return head
+ return head, done
+ end
+
+ local lw = languages.words
+
+ function languages.words.check(head)
+ if head.next and lw.enable then
+ local color = attributes.numbers['color']
+ local colors = lw.colors
+ local alc = attributes.list[color]
+ return lw.methods[lw.method](head, color, alc[colors.known], alc[colors.unknown])
+ else
+ return head, false
+ end
end
end
-languages.set = languages.dictionary.set
-languages.add = languages.dictionary.add
-languages.remove = languages.dictionary.remove
-languages.hyphenate = languages.dictionary.hyphenate
-languages.found = languages.dictionary.found
-languages.check = languages.dictionary.check
+-- for the moment we hook it into the attribute handler
+
+--~ languagehacks = { }
+
+--~ function languagehacks.process(namespace,attribute,head)
+--~ return languages.check(head)
+--~ end
+
+--~ chars.plugins.language = {
+--~ namespace = languagehacks,
+--~ processor = languagehacks.process
+--~ }
+
+-- must happen at the tex end
+
+languages.associate('en','latn','eng')
+languages.associate('uk','latn','eng')
+languages.associate('nl','latn','nld')
+languages.associate('de','latn','deu')
+languages.associate('fr','latn','fra')
diff --git a/tex/context/base/lang-ini.mkii b/tex/context/base/lang-ini.mkii
index a0f6f3881..9fa912acf 100644
--- a/tex/context/base/lang-ini.mkii
+++ b/tex/context/base/lang-ini.mkii
@@ -11,4 +11,135 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% mkiv code is experimental
+\unprotect
+
+\def\mkdoloadpatterns#1#2%
+ {\expanded{\getcommacommandsize[\getvalue{\??la#2\s!encoding}]}%
+ \ifnum\commalistsize>0
+ %\message{[nofpatterns #2: \commalistsize/\getvalue{\??la#2\s!encoding}]}%
+ \dorecurse\commalistsize
+ {\expanded{\getfromcommacommand[\getvalue{\??la#2\s!encoding}][\recurselevel]}%
+ \let\patternencoding\commalistelement
+ \expanded{\getfromcommacommand[\getvalue{\??la#2\s!mapping }][\recurselevel]}%
+ \let\patternmapping \commalistelement
+ %\message{[patterns: #1/#2/\patternencoding/\patternmapping]}%
+ \dodoloadpatterns{#1}{#2}\patternencoding\patternmapping}%
+ \else
+ %\message{[patterns: #1/#2]}%
+ \dodoloadpatterns{#1}{#2}{}{}%
+ \fi}
+
+\def\setuphyppatencoding
+ {\pathypsettings
+ \enableregime[utf]}
+
+\def\dodoloadpatterns#1#2#3#4% beware, loaded language also incr
+ {\normallanguage\loadedlanguage % when not really needed
+ \bgroup
+ \let\synchronizepatterns\relax % needed?
+ \let\enabledmapping \empty % needed?
+ \doifelsenothing{#3}{\enableencoding[\s!default]}{\enableencoding[#3]}%
+ \doifelsenothing{#4}{\enablemapping [\s!default]}{\enablemapping [#4]}%
+ \setuphyppatencoding
+ \ifundefined{\??la\??la:\currentencoding:\currentmapping:#2}%
+ \let\doshowpatterns\relax
+ \edef\alreadyloadedlanguage
+ {\executeifdefined{\??la\??la:\currentencoding:\currentmapping:\truefilename{\f!languageprefix#2.\f!patternsextension}}\empty}%
+ \edef\alreadyloadedlanguage
+ {\executeifdefined{\??la\??la:\currentencoding:\currentmapping:\f!languageprefix#2.\f!patternsextension}\alreadyloadedlanguage}%
+ \ifx\alreadyloadedlanguage\empty
+ \letgvalue{\??la\??la:\currentencoding:\currentmapping:#2}\loadedlanguage
+ \doifundefined{\??la\??la:\s!default:\s!default:#2}{\letgvalue{\??la\??la:\s!default:\s!default:#2}\loadedlanguage}% fall back
+ \startpatternloading{\truefilename{\f!languageprefix#2.\f!patternsextension}}{#3}{#4}%
+ \readsysfile{\truefilename{\f!languageprefix#2.\f!patternsextension}}
+ {\setxvalue{\??la#1\s!patterns}{#2}%
+ \setxvalue{\??la\??la:\currentencoding:\currentmapping:\truefilename{\f!languageprefix#2.\f!patternsextension}}{\number\loadedlanguage}%
+ \xdef\preloadedpmessage{\preloadedpmessage\doshowpatterns{#2}{\number\normallanguage}{\currentencoding}{\currentmapping}}%
+ \doglobal\addtocommalist{#2}\preloadedpatterns
+ \showmessage\m!linguals1{#2,#1,\loadedlanguage,\currentencoding,\currentmapping}}
+ {\showmessage\m!linguals2{#2,#1,\loadedlanguage,\currentencoding,\currentmapping,\f!languageprefix#2.\f!patternsextension,\truefilename{\f!languageprefix#2.\f!patternsextension}}}%
+ \stoppatternloading
+ \startpatternloading{\truefilename{\f!languageprefix#2.\f!hyphensextension}}{#3}{#4}%
+ \readsysfile{\truefilename{\f!languageprefix#2.\f!hyphensextension}}
+ {\showmessage\m!linguals3{#2,#1,\loadedlanguage,\currentencoding,\currentmapping}}
+ {\showmessage\m!linguals4{#2,#1,\loadedlanguage,\currentencoding,\currentmapping}}%
+ \stoppatternloading
+ \doglobal\increment\loadedlanguage
+ % \stopencoding
+ \else % optimization, introduced 2004.08.24, while sorting out changes in tl
+ \letgvalue{\??la\??la:\currentencoding:\currentmapping:#2}\alreadyloadedlanguage
+ \doifundefined{\??la\??la:\s!default:\s!default:#2}{\letgvalue{\??la\??la:\s!default:\s!default:#2}\loadedlanguage}% fall back
+ \setxvalue{\??la#1\s!patterns}{#2}%
+ \xdef\preloadedpmessage{\preloadedpmessage\doshowpatterns{#2}{[\number\alreadyloadedlanguage]}{\currentencoding}{\currentmapping}}%
+ \doglobal\addtocommalist{#2}\preloadedpatterns
+ \showmessage\m!linguals1{#2,#1,[\alreadyloadedlanguage],\currentencoding,\currentmapping}%
+ \fi
+ \fi
+ \egroup}
+
+%D Since we can only load patterns in ini\TeX, we nil the
+%D loading before dumping (which saves a bit of memory, but
+%D strangely enough not in the format).
+
+\appendtoks
+ \gdef\doloadpatterns{\doglobal\increment\loadedlanguage\gobbletwoarguments}%
+ \globallet\dodoloadpatterns\gobblefourarguments
+\to \everydump
+
+\def\mkdoifpatternselse#1%
+ {\expanded{\doifinsetelse{#1}{\preloadedpatterns}}}
+
+\def\mksetnormallanguage#1#2% current default
+ {% called quite often, so we use \csname
+ % \def\synchronizepatterns{\setnormallanguage
+ % {\csname\??la\currentlanguage\s!patterns\endcsname}}% called often
+ % or even better, pre-expand in an ugly way:
+ \@EA\def\@EA\synchronizepatterns\@EA{\@EA\dosetnormallanguage
+ \csname\??la\currentlanguage\s!patterns\endcsname}%
+ \donefalse
+ \synchronizepatterns
+ \ifdone\else
+ \def\synchronizepatterns{\dosetnormallanguage\currentlanguage}%
+ \synchronizepatterns
+ \ifdone\else
+ \ifx\currentdefaultlanguage\empty\else
+ \@EA\def\@EA\synchronizepatterns\@EA{\@EA\dosetnormallanguage
+ \csname\??la\currentdefaultlanguage\s!patterns\endcsname}%
+ \synchronizepatterns
+ \ifdone\else
+ \dosetnormallanguage\currentdefaultlanguage
+ \synchronizepatterns
+ \fi
+ \fi
+ \fi
+ \fi}
+
+\def\dosetnormallanguage#1% #1 == \cs
+ {\dodosetnormallanguage{:\currentencoding:\currentmapping:}#1{%
+ \dodosetnormallanguage{:\currentencoding:\s!default :}#1{%
+ \dodosetnormallanguage{:\s!default :\currentmapping:}#1{%
+ \dodosetnormallanguage{:\s!default :\s!default :}#1\empty}}}}
+
+\def\dodosetnormallanguage#1#2%
+ {\ifcsname\??la\??la#1#2\endcsname
+ \edef\thenormallanguage{\csname\??la\??la#1#2\endcsname}% can be \chardef
+ \ifx\thenormallanguage\empty
+ \@EAEAEA\firstofoneargument
+ \else
+ \donetrue
+ \@EA\xdef\csname\??la\currentlanguage\s!patterns\endcsname{#2}%
+ \normallanguage\thenormallanguage\relax % \relax is needed for lookahead problems
+ \@EAEAEA\gobbleoneargument
+ \fi
+ \else
+ \@EA\firstofoneargument
+ \fi}
+
+\beginXETEX
+ \def\synchronizepatternswithfont{}
+ \def\doloadpatterns #1#2{\dodoloadpatterns{#1}{#2}\s!default\s!default}
+ \def\setnormallanguage #1{\dosetnormallanguage{:\s!default:\s!default:}#1\empty}
+ \def\setuphyppatencoding {\pathypsettings}
+\endXETEX
+
+\protect \endinput
diff --git a/tex/context/base/lang-ini.mkiv b/tex/context/base/lang-ini.mkiv
index ac87d85e7..4e4c3c81f 100644
--- a/tex/context/base/lang-ini.mkiv
+++ b/tex/context/base/lang-ini.mkiv
@@ -15,14 +15,63 @@
\registerctxluafile{lang-ini}{1.001}
-\def\synchronizepatternswithfont{}
-\def\doloadpatterns #1#2{\dodoloadpatterns{#1}{#2}\s!default\s!default}
-\def\setnormallanguage #1{\dosetnormallanguage{:\s!default:\s!default:}#1\empty}
-\def\setuphyppatencoding {\pathypsettings}
+\let\synchronizepatterns \relax % todo: cleanup
+\let\synchronizepatternswithfont\relax % todo: cleanup
-% temporarily here, awaiting new mechanisms
+\def\mkdoloadpatterns#1#2%
+ {\ctxlua{languages.register(
+ "#1",
+ "#2",
+ "\truefilename{\f!languageprefix#2.\f!patternsextension}",
+ "\truefilename{\f!languageprefix#2.\f!hyphensextension }")
+ }}
-\def\loadpatternfiles#1{\ctxlua{languages.hyphenation.load('#1', 'lang-#1.pat', 'lang-#1.hyp')}}
-\def\hyphenateword #1{\ctxlua{tex.sprint(languages.hyphenation.hyphenate("\currentlanguage",[[#1]]))}}
+\def\mkdoifpatternselse#1%
+ {\ctxlua{cs.testcase(languages.loadable("#1"))}}
+
+\def\mksetnormallanguage#1#2% current default / we can freeze the number here
+ {\normallanguage=\ctxlua{tex.sprint(languages.enable({
+ "\csname\??la#1\s!patterns\endcsname","#1",
+ "\csname\??la#2\s!patterns\endcsname","#2",
+ }))}\relax}
+
+% to be tested
+%
+% \def\mkdosetnormallanguage#1#2% current default
+% {\normallanguage=\ctxlua{tex.sprint(languages.enable({
+% "\csname\??la#1\s!patterns\endcsname","#1",
+% "\csname\??la#2\s!patterns\endcsname","#2",
+% }))}}%
+% \setxvalue{\??la\??la#1#2}{\number\normallanguage}}
+%
+% \def\mksetnormallanguage#1#2% current default / we can freeze the number here
+% {\normallanguage\executeifdefined{\??la\??la#1#2}{\mkdosetnormallanguage{#1}{#2}}}
+
+
+\def\loadspellchecklist
+ {\dodoubleempty\doloadspellchecklist}
+
+% mkiv only -- todo: internationalize command names
+
+% \loadspellchecklist[en][words-en.txt]
+% \loadspellchecklist[nl][words-nl.txt]
+% \setupspellchecking[state=start]
+
+\def\loadspellchecklist[#1][#2]%
+ {\ctxlua{languages.words.load("#1","#2")}}
+
+\def\setupspellchecking
+ {\dosingleargument\dosetupspellchecking}
+
+\def\setupspellchecking[#1]% todo colors
+ {\getparameters[\??wl][#1]%
+ \doifelse\@@wlstate\v!start
+ {\ctxlua{languages.words.enable=true }}
+ {\ctxlua{languages.words.enable=false}}}
+
+\setupspellchecking
+ [\c!state=\v!stop]
+
+\uchyph=1
\protect \endinput
diff --git a/tex/context/base/lang-ini.tex b/tex/context/base/lang-ini.tex
index 6f0352772..bdefd6a1a 100644
--- a/tex/context/base/lang-ini.tex
+++ b/tex/context/base/lang-ini.tex
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D Todo : --language=pl,en,nl : nl incs number of language
+%D This module needs a further cleanup (real split between ii/iv).
%D This module implements the (for the moment still simple)
%D multi||language support of \CONTEXT, which should not be
@@ -157,24 +157,6 @@
\normallanguage\zerocount \def\loadedlanguage{1}
-%D Let's start with setting the lowercase code of quotes, so
-%D that we get proper hyphenation in languages like Dutch,
-%D French and Czech.
-
-% elsewhere: \lccode`\'=`\'
-
-% \def\showlccodes
-% {\currentlanguage:\space
-% \dostepwiserecurse{0}{255}{1}
-% {\ifnum\lccode\recurselevel>0
-% \char\recurselevel:\the\lccode\recurselevel\relax\space
-% \fi}
-% \endgraf}
-
-% \de \hyphenatedword{Works''} \showlccodes
-% \nl \hyphenatedword{Works''} \showlccodes
-% \uk \hyphenatedword{Works''} \showlccodes
-
%D \macros
%D {currentlanguage, setupcurrentlanguage}
%D
@@ -272,8 +254,7 @@
\def\doiflanguageelse#1{\doifdefinedelse{\??la#1\c!state}}
\def\doinstalllanguage[#1][#2]%
- {%\ConvertConstantAfter\doifinstringelse{=}{#2}
- \doifassignmentelse{#2}
+ {\doifassignmentelse{#2}
{\doiflanguageelse{#1}
{\getparameters[\??la#1][#2]}
{\setvalue{\l!prefix!#1}{#1}%
@@ -326,97 +307,11 @@
\let \patternencoding \s!default
\let \patternmapping \s!default
-\def\doloadpatterns#1#2%
- {\expanded{\getcommacommandsize[\getvalue{\??la#2\s!encoding}]}%
- \ifnum\commalistsize>0
- %\message{[nofpatterns #2: \commalistsize/\getvalue{\??la#2\s!encoding}]}%
- \dorecurse\commalistsize
- {\expanded{\getfromcommacommand[\getvalue{\??la#2\s!encoding}][\recurselevel]}%
- \let\patternencoding\commalistelement
- \expanded{\getfromcommacommand[\getvalue{\??la#2\s!mapping }][\recurselevel]}%
- \let\patternmapping \commalistelement
- %\message{[patterns: #1/#2/\patternencoding/\patternmapping]}%
- \dodoloadpatterns{#1}{#2}\patternencoding\patternmapping}%
- \else
- %\message{[patterns: #1/#2]}%
- \dodoloadpatterns{#1}{#2}{}{}%
- \fi}
-
-\def\setuphyppatencoding
- {\pathypsettings
- \enableregime[utf]}
-
-\beginXETEX
- \def\synchronizepatternswithfont{}
- \def\doloadpatterns #1#2{\dodoloadpatterns{#1}{#2}\s!default\s!default}
- \def\setnormallanguage #1{\dosetnormallanguage{:\s!default:\s!default:}#1\empty}
- \def\setuphyppatencoding {\pathypsettings}
-\endXETEX
-
-\beginLUATEX
- \def\synchronizepatternswithfont{}
- \def\doloadpatterns #1#2{\dodoloadpatterns{#1}{#2}\s!default\s!default}
- \def\setnormallanguage #1{\dosetnormallanguage{:\s!default:\s!default:}#1\empty}
- \def\setuphyppatencoding {\pathypsettings}
-\endLUATEX
-
-\def\dodoloadpatterns#1#2#3#4% beware, loaded language also incr
- {\normallanguage\loadedlanguage % when not really needed
- \bgroup
- \let\synchronizepatterns\relax % needed?
- \let\enabledmapping \empty % needed?
- \doifelsenothing{#3}{\enableencoding[\s!default]}{\enableencoding[#3]}%
- \doifelsenothing{#4}{\enablemapping [\s!default]}{\enablemapping [#4]}%
- \setuphyppatencoding
- \ifundefined{\??la\??la:\currentencoding:\currentmapping:#2}%
- \let\doshowpatterns\relax
- \edef\alreadyloadedlanguage
- {\executeifdefined{\??la\??la:\currentencoding:\currentmapping:\truefilename{\f!languageprefix#2.\f!patternsextension}}\empty}%
- \edef\alreadyloadedlanguage
- {\executeifdefined{\??la\??la:\currentencoding:\currentmapping:\f!languageprefix#2.\f!patternsextension}\alreadyloadedlanguage}%
- \ifx\alreadyloadedlanguage\empty
- \letgvalue{\??la\??la:\currentencoding:\currentmapping:#2}\loadedlanguage
- \doifundefined{\??la\??la:\s!default:\s!default:#2}{\letgvalue{\??la\??la:\s!default:\s!default:#2}\loadedlanguage}% fall back
- \startpatternloading{\truefilename{\f!languageprefix#2.\f!patternsextension}}{#3}{#4}%
- \readsysfile{\truefilename{\f!languageprefix#2.\f!patternsextension}}
- {\setxvalue{\??la#1\s!patterns}{#2}%
- \setxvalue{\??la\??la:\currentencoding:\currentmapping:\truefilename{\f!languageprefix#2.\f!patternsextension}}{\number\loadedlanguage}%
- \xdef\preloadedpmessage{\preloadedpmessage\doshowpatterns{#2}{\number\normallanguage}{\currentencoding}{\currentmapping}}%
- \doglobal\addtocommalist{#2}\preloadedpatterns
- \showmessage\m!linguals1{#2,#1,\loadedlanguage,\currentencoding,\currentmapping}}
- {\showmessage\m!linguals2{#2,#1,\loadedlanguage,\currentencoding,\currentmapping,\f!languageprefix#2.\f!patternsextension,\truefilename{\f!languageprefix#2.\f!patternsextension}}}%
- \stoppatternloading
- \startpatternloading{\truefilename{\f!languageprefix#2.\f!hyphensextension}}{#3}{#4}%
- \readsysfile{\truefilename{\f!languageprefix#2.\f!hyphensextension}}
- {\showmessage\m!linguals3{#2,#1,\loadedlanguage,\currentencoding,\currentmapping}}
- {\showmessage\m!linguals4{#2,#1,\loadedlanguage,\currentencoding,\currentmapping}}%
- \stoppatternloading
- \doglobal\increment\loadedlanguage
- % \stopencoding
- \else % optimization, introduced 2004.08.24, while sorting out changes in tl
- \letgvalue{\??la\??la:\currentencoding:\currentmapping:#2}\alreadyloadedlanguage
- \doifundefined{\??la\??la:\s!default:\s!default:#2}{\letgvalue{\??la\??la:\s!default:\s!default:#2}\loadedlanguage}% fall back
- \setxvalue{\??la#1\s!patterns}{#2}%
- \xdef\preloadedpmessage{\preloadedpmessage\doshowpatterns{#2}{[\number\alreadyloadedlanguage]}{\currentencoding}{\currentmapping}}%
- \doglobal\addtocommalist{#2}\preloadedpatterns
- \showmessage\m!linguals1{#2,#1,[\alreadyloadedlanguage],\currentencoding,\currentmapping}%
- \fi
- \fi
- \egroup}
-
-\def\doifpatternselse#1%
- {\expanded{\doifinsetelse{#1}{\preloadedpatterns}}}
+\ifx\mkdoloadpatterns \undefined \let\mkdoloadpatterns \gobbletwoarguments \fi
+\ifx\mkdoifpatternselse\undefined \let\mkdoifpatternselse\gobbletwoarguments \fi
-%D Since we can only load patterns in ini\TeX, we nil the
-%D loading before dumping (which saves a bit of memory, but
-%D strangely enough not in the format).
-
-\appendtoks
- \gdef\doloadpatterns{\doglobal\increment\loadedlanguage\gobbletwoarguments}%
- \globallet\dodoloadpatterns\gobblefourarguments
-\to \everydump
-
-\loadmarkfile{lang-ini} % not yet
+\def\doloadpatterns {\mkdoloadpatterns}
+\def\doifpatternselse{\mkdoifpatternselse}
%D \macros
%D {setuplanguage}
@@ -525,45 +420,12 @@
%D We take care of local as well as standardized language
%D switching (fr and fa, de and du, but nl and nl).
-% new, encoding specific patterns
-
\ifx\synchronizepatterns \undefined \let\synchronizepatterns\relax \fi
\ifx\synchronizepatternswithfont\undefined \def\synchronizepatternswithfont{\synchronizepatterns} \fi
-\beginTEX
-
-\def\dosetnormallanguage#1#2%
- {\@EA\ifx\csname\??la\??la#1#2\endcsname\relax
- \@EA\firstofoneargument
- \else\@EA\ifx\csname\??la\??la#1#2\endcsname\empty
- \@EAEAEA\firstofoneargument
- \else
- \donetrue
- \@EA\xdef\csname\??la\currentlanguage\s!patterns\endcsname{#2}%
- \normallanguage\csname\??la\??la#1#2\endcsname\relax % \relax is needed for lookahead problems
- \@EAEAEA\gobbleoneargument
- \fi\fi}
-
-\endTEX
-
-\beginETEX
-
-\def\dosetnormallanguage#1#2%
- {\ifcsname\??la\??la#1#2\endcsname
- \edef\thenormallanguage{\csname\??la\??la#1#2\endcsname}%
- \ifx\thenormallanguage\empty
- \@EAEAEA\firstofoneargument
- \else
- \donetrue
- \@EA\xdef\csname\??la\currentlanguage\s!patterns\endcsname{#2}%
- \normallanguage\thenormallanguage\relax % \relax is needed for lookahead problems
- \@EAEAEA\gobbleoneargument
- \fi
- \else
- \@EA\firstofoneargument
- \fi}
+\ifx\mksetnormallanguage\undefined \let\mksetnormallanguage\gobbletwoarguments \fi
-\endETEX
+\def\setnormallanguage{\mksetnormallanguage}
\newevery \everylanguage \relax
\newevery \everyresetlanguagespecifics \relax
@@ -571,20 +433,6 @@
\def\disablelanguagespecifics
{\ignorecompoundcharacter}
-% \def\setnormallanguage#1%
-% {\dosetnormallanguage{:\currentencoding:\currentmapping:}{#1}{%
-% \dosetnormallanguage{:\currentencoding:\s!default :}{#1}{%
-% \dosetnormallanguage{:\s!default :\currentmapping:}{#1}{%
-% \dosetnormallanguage{:\s!default :\s!default :}{#1}\empty}}}}
-%
-% assume #1 = \cs
-
-\def\setnormallanguage#1%
- {\dosetnormallanguage{:\currentencoding:\currentmapping:}#1{%
- \dosetnormallanguage{:\currentencoding:\s!default :}#1{%
- \dosetnormallanguage{:\s!default :\currentmapping:}#1{%
- \dosetnormallanguage{:\s!default :\s!default :}#1\empty}}}}
-
\def\sethyphenationvariables
{\lefthyphenmin 0\languageparameter\s!lefthyphenmin \relax
\righthyphenmin0\languageparameter\s!righthyphenmin\relax
@@ -593,53 +441,16 @@
\def\docomplexlanguage% assumes that \currentlanguage is set
{\edef\currentdefaultlanguage{\defaultlanguage\currentlanguage}%
- % called quite often, so we use \csname
- % \def\synchronizepatterns{\setnormallanguage
- % {\csname\??la\currentlanguage\s!patterns\endcsname}}% called often
- % of even better pre-expand in an ugly way:
- \@EA\def\@EA\synchronizepatterns\@EA{\@EA\setnormallanguage
- \csname\??la\currentlanguage\s!patterns\endcsname}%
- \donefalse
- \synchronizepatterns
- \ifdone\else
- \def\synchronizepatterns{\setnormallanguage\currentlanguage}%
- \synchronizepatterns
- \ifdone\else
- \ifx\currentdefaultlanguage\empty\else
- % \def\synchronizepatterns{\setnormallanguage
- % {\csname\??la\currentdefaultlanguage\s!patterns\endcsname}}%
- \@EA\def\@EA\synchronizepatterns\@EA{\@EA\setnormallanguage
- \csname\??la\currentdefaultlanguage\s!patterns\endcsname}%
- \synchronizepatterns
- \ifdone\else
- \setnormallanguage\currentdefaultlanguage
- \synchronizepatterns
- \fi
- \fi
- \fi
- \fi
+ \mksetnormallanguage\currentlanguage\currentdefaultlanguage
\the\everylanguage
\enablelanguagespecifics[\currentlanguage]%
- % strange, what is this doing here, dangerous for {il2,ec}
- % \edef\languagemapping{\csname\??la\currentlanguage\s!mapping\endcsname}%
- % \ifx\languagemapping\empty\else
- % \fastenablemapping\languagemapping
- % \fi
\sethyphenationvariables
- %\lefthyphenmin 0\languageparameter\s!lefthyphenmin
- %\righthyphenmin0\languageparameter\s!righthyphenmin
\relax
% will be definable and move to core-spa !
\doifelse{\languageparameter\c!spacing}\v!broad
\nonfrenchspacing\frenchspacing}
-\ifx\enablelanguagespecifics\undefined
-
- \def\enablelanguagespecifics[#1]{}
-
-\fi
-
-\beginETEX
+\ifx\enablelanguagespecifics\undefined \def\enablelanguagespecifics[#1]{} \fi
\def\complexlanguage[#1]%
{\edef\askedlanguage{#1}%
@@ -655,32 +466,10 @@
\fi
\fi}
-\endETEX
-
-\beginTEX
-
-\def\complexlanguage[#1]%
- {\edef\askedlanguage{#1}%
- \ifx\askedlanguage\empty \else
- \@EA\ifx\csname\l!prefix!\askedlanguage\endcsname\relax
- \showmessage\m!linguals6{#1}%
- \else
- \edef\askedlanguage{\csname\l!prefix!\askedlanguage\endcsname}%
- \ifx\currentlanguage\askedlanguage \else
- \setcurrentlanguage\currentmainlanguage\askedlanguage
- \docomplexlanguage
- \fi
- \fi
- \fi}
-
-\endTEX
-
\let\simplelanguage\normallanguage
\definecomplexorsimple\language
-\beginETEX
-
\def\mainlanguage[#1]%
{\edef\askedlanguage{#1}%
\ifx\askedlanguage\empty \else
@@ -693,24 +482,6 @@
\fi
\fi}
-\endETEX
-
-\beginTEX
-
-\def\mainlanguage[#1]%
- {\edef\askedlanguage{#1}%
- \ifx\askedlanguage\empty \else
- \@EA\ifx\csname\l!prefix!\askedlanguage\endcsname\relax\else
- \edef\askedlanguage{\csname\l!prefix!\askedlanguage\endcsname}%
- \ifx\currentmainlanguage\askedlanguage \else
- \setcurrentlanguage\askedlanguage\askedlanguage
- \docomplexlanguage
- \fi
- \fi
- \fi}
-
-\endTEX
-
%D \macros
%D {defaultlanguage,languagedefault}
%D
@@ -729,9 +500,6 @@
\def\languagedefault#1#2%
{\csname\??la\defaultlanguage{#1}#2\endcsname}
-% \def\languageparameter#1%
-% {\csname\??la\defaultlanguage\currentlanguage#1\endcsname}
-
\def\languageparameter % @EA = speedup
{\@EA\dolanguageparameter\@EA{\defaultlanguage\currentlanguage}}
@@ -744,120 +512,27 @@
\def\defaultlanguageparameter#1%
{\csname\??la\s!default#1\endcsname}
-\beginETEX
-
- \def\dolanguageparameter#1#2%
- {\csname\??la
- \ifcsname\??la\currentlanguage#2\endcsname
- \currentlanguage
- \else\ifcsname\??la#1#2\endcsname
- \@EA\ifx\csname\??la#1#2\endcsname\empty\s!default\else#1\fi
- \else
- \s!default
- \fi\fi
- #2\endcsname}
-
- \def\dospecificlanguageparameter#1#2#3%
- {\csname\??la
- \ifcsname\??la#2#3\endcsname
- \@EA\ifx\csname\??la#2#3\endcsname\empty\s!default\else#2\fi
- \else\ifcsname\??la#1#3\endcsname
- \@EA\ifx\csname\??la#1#3\endcsname\empty\s!default\else#1\fi
- \else
- \s!default
- \fi\fi
- #3\endcsname}
-
-\endETEX
-
-\beginTEX
-
- \def\dolanguageparameter#1#2%
- {\csname\??la
- \@EA\ifx\csname\??la\currentlanguage#2\endcsname\relax
- \@EA\ifx\csname\??la#1#2\endcsname\relax
- \s!default
- \else
- \@EA\ifx\csname\??la#1#2\endcsname\empty\s!default\else#1\fi
- \fi
- \else
- \currentlanguage
- \fi
- #2\endcsname}
-
- \def\dospecificlanguageparameter#1#2#3%
- {\csname\??la
- \@EA\ifx\csname\??la#2#3\endcsname\relax
- \@EA\ifx\csname\??la#1#3\endcsname\relax
- \s!default
- \else
- \@EA\ifx\csname\??la#1#3\endcsname\empty\s!default\else#1\fi
- \fi
- \else
- \@EA\ifx\csname\??la#2#3\endcsname\empty\s!default\else#2\fi
- \fi
- #3\endcsname}
-
-\endTEX
-
-% moved
-%
-% %D \macros
-% %D {leftguillemot,rightguillemot,leftsubguillemot,rightsubguillemot,
-% %D ...single...quote,...double...quote}
-% %D
-% %D We assign logical names to all kind of quote and sentence
-% %D boundary characters.
-% %D
-% %D When using Computer Modern Roman, the next definitions
-% %D looks a bit better than the default ligatures.
-% %D
-% %D \starttyping
-% %D \def\lowerleftsingleninequote {,}
-% %D \def\lowerleftdoubleninequote {,\kern-.1em,}
-% %D \def\upperleftsingleninequote {'}
-% %D \def\upperleftdoubleninequote {''\kern-.1em}
-% %D \def\upperleftsinglesixquote {`}
-% %D \def\upperleftdoublesixquote {`\kern-.1em`}
-% %D
-% %D \def\lowerrightsingleninequote {,}
-% %D \def\lowerrightdoubleninequote {,\kern-.1em,}
-% %D \def\upperrightsingleninequote {'}
-% %D \def\upperrightdoubleninequote {''}
-% %D \def\upperrightsinglesixquote {`}
-% %D \def\upperrightdoublesixquote {\kern-.125em``}
-% %D \stoptyping
-% %D
-% %D But in other fonts, these definitions can give problems, so
-% %D we just say:
-%
-% \def\lowerleftsingleninequote {,}
-% \def\lowerleftdoubleninequote {,,}
-% \def\upperleftsingleninequote {'}
-% \def\upperleftdoubleninequote {''}
-% \def\upperleftsinglesixquote {`}
-% \def\upperleftdoublesixquote {``}
-%
-% \def\lowerrightsingleninequote {,}
-% \def\lowerrightdoubleninequote {,,}
-% \def\upperrightsingleninequote {'}
-% \def\upperrightdoubleninequote {''}
-% \def\upperrightsinglesixquote {`}
-% \def\upperrightdoublesixquote {``}
-%
-% %D Yes I know, they are ugly:
-%
-% \def\leftfakeguillemot
-% {\dontleavehmode\hbox{\raise.25ex\hbox{$\scriptscriptstyle\ll$}}}
-%
-% \def\rightfakeguillemot
-% {\hbox{\raise.25ex\hbox{$\scriptscriptstyle\gg$}}}
-%
-% \def\leftsubfakeguillemot
-% {\dontleavehmode\hbox{\raise.25ex\hbox{$\scriptscriptstyle<$}}}
-%
-% \def\rightsubfakeguillemot
-% {\hbox{\raise.25ex\hbox{$\scriptscriptstyle>$}}}
+\def\dolanguageparameter#1#2%
+ {\csname\??la
+ \ifcsname\??la\currentlanguage#2\endcsname
+ \currentlanguage
+ \else\ifcsname\??la#1#2\endcsname
+ \@EA\ifx\csname\??la#1#2\endcsname\empty\s!default\else#1\fi
+ \else
+ \s!default
+ \fi\fi
+ #2\endcsname}
+
+\def\dospecificlanguageparameter#1#2#3%
+ {\csname\??la
+ \ifcsname\??la#2#3\endcsname
+ \@EA\ifx\csname\??la#2#3\endcsname\empty\s!default\else#2\fi
+ \else\ifcsname\??la#1#3\endcsname
+ \@EA\ifx\csname\??la#1#3\endcsname\empty\s!default\else#1\fi
+ \else
+ \s!default
+ \fi\fi
+ #3\endcsname}
%D New (see nomarking and nolist):
@@ -987,13 +662,14 @@
\def\nopatterns{\normallanguage\minusone}
+%D Mark plugin:
+
+\loadmarkfile{lang-ini} % not yet
+
%D We default to the language belonging to the interface. This
%D is one of the few places outside the interface modules where
%D \type{\startinterface} is used.
-\let\normaldoublequote ="
-\let\normalforwardslash=/
-
%D We default to english:
\setupcurrentlanguage[\s!en]
@@ -1002,8 +678,4 @@
\appendtoks\showmessage\m!linguals9\currentlanguage\to\everyjob
-%D Brrr:
-
-% \ifx\@@ladefault\undefined \let\@@ladefault\s!en \fi
-
\protect \endinput
diff --git a/tex/context/base/lang-sla.tex b/tex/context/base/lang-sla.tex
index 330c6f7f5..268a9c332 100644
--- a/tex/context/base/lang-sla.tex
+++ b/tex/context/base/lang-sla.tex
@@ -20,7 +20,7 @@
%D us an email.
%D
%D \starttabulate[|lB|l|]
-%D \NC Czech \NC Tom Hidec, Petr Sojka \NC \NR
+%D \NC Czech \NC Tom Hudec, Petr Sojka \NC \NR
%D \NC Polish \NC Grzegorz Sapijaszko \NC \NR
%D \NC Croatian \NC \Zcaron eljko Vrba \NC \NR
%D \NC Slovenian \NC Mojca Miklavec \NC \NR
diff --git a/tex/context/base/luat-cbk.lua b/tex/context/base/luat-cbk.lua
index efb534d7d..a22c70acb 100644
--- a/tex/context/base/luat-cbk.lua
+++ b/tex/context/base/luat-cbk.lua
@@ -99,7 +99,8 @@ garbagecollector = { }
do
local level = 0
- collectgarbage("setstepmul", 165)
+--~ collectgarbage("setstepmul", 165)
+--~ collectgarbage("setstepmul",50)
garbagecollector.trace = false
garbagecollector.tune = false -- for the moment
diff --git a/tex/context/base/luat-inp.lua b/tex/context/base/luat-inp.lua
index 541bde5c3..ba5d97c29 100644
--- a/tex/context/base/luat-inp.lua
+++ b/tex/context/base/luat-inp.lua
@@ -19,6 +19,7 @@
-- Beware, loading and saving is overloaded in luat-tmp!
-- todo: instances.[hashes,cnffiles,configurations,522] -> ipairs (check everything, faster)
+-- todo: check escaping in find etc, too much, too slow
if not versions then versions = { } end versions['luat-inp'] = 1.001
if not environment then environment = { } end
@@ -256,31 +257,36 @@ input.settrace(tonumber(os.getenv("MTX.INPUT.TRACE") or os.getenv("MTX_INPUT_TRA
-- These functions can be used to test the performance, especially
-- loading the database files.
-function input.start_timing(instance)
- if instance then
- instance.starttime = os.clock()
- if not instance.loadtime then
- instance.loadtime = 0
+do
+ local clock = os.clock
+
+ function input.starttiming(instance)
+ if instance then
+ instance.starttime = clock()
+ if not instance.loadtime then
+ instance.loadtime = 0
+ end
end
end
-end
-function input.stop_timing(instance, report)
- if instance and instance.starttime then
- instance.stoptime = os.clock()
- local loadtime = instance.stoptime - instance.starttime
- instance.loadtime = instance.loadtime + loadtime
- if report then
- input.report('load time', string.format("%0.3f",loadtime))
+ function input.stoptiming(instance, report)
+ if instance then
+ local starttime = instance.starttime
+ if starttime then
+ local stoptime = clock()
+ local loadtime = stoptime - starttime
+ instance.stoptime = stoptime
+ instance.loadtime = instance.loadtime + loadtime
+ if report then
+ input.report('load time', string.format("%0.3f",loadtime))
+ end
+ return loadtime
+ end
end
- return loadtime
- else
return 0
end
-end
-input.stoptiming = input.stop_timing
-input.starttiming = input.start_timing
+end
function input.elapsedtime(instance)
return string.format("%0.3f",instance.loadtime or 0)
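For reference, a minimal usage sketch of the renamed timing pair (assuming the luatools environment is loaded; the instance table and the dummy loop are hypothetical):

    local instance = { }                       -- any table can carry the timing fields
    input.starttiming(instance)
    for i=1,100000 do local _ = i*i end        -- stand-in for real work
    local t = input.stoptiming(instance,true)  -- true also reports "load time"
    print(t, input.elapsedtime(instance))      -- seconds, and the formatted total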
@@ -594,99 +600,106 @@ function input.generatedatabase(instance,specification)
return input.methodhandler('generators', instance, specification)
end
-function input.generators.tex(instance,specification)
- local tag = specification
- if not instance.lsrmode and lfs and lfs.dir then
- input.report("scanning path",specification)
- instance.files[tag] = { }
- local files = instance.files[tag]
- local n, m, r = 0, 0, 0
- local spec = specification .. '/'
- local attributes = lfs.attributes
- local directory = lfs.dir
- local small = instance.smallcache
- local function action(path)
- local mode, full
- if path then
- full = spec .. path .. '/'
- else
- full = spec
- end
- for name in directory(full) do
- if name:find("^%.") then
- -- skip
- elseif name:find("[%~%`%!%#%$%%%^%&%*%(%)%=%{%}%[%]%:%;\"\'%|%|%<%>%,%?\n\r\t]") then
- -- texio.write_nl("skipping " .. name)
- -- skip
+do
+
+ local weird = lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+
+ function input.generators.tex(instance,specification)
+ local tag = specification
+ if not instance.lsrmode and lfs and lfs.dir then
+ input.report("scanning path",specification)
+ instance.files[tag] = { }
+ local files = instance.files[tag]
+ local n, m, r = 0, 0, 0
+ local spec = specification .. '/'
+ local attributes = lfs.attributes
+ local directory = lfs.dir
+ local small = instance.smallcache
+ local function action(path)
+ local mode, full
+ if path then
+ full = spec .. path .. '/'
else
- mode = attributes(full..name,'mode')
- if mode == "directory" then
- m = m + 1
- if path then
- action(path..'/'..name)
- else
- action(name)
- end
- elseif path and mode == 'file' then
- n = n + 1
- local f = files[name]
- if f then
- if not small then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
+ full = spec
+ end
+ for name in directory(full) do
+ if name:find("^%.") then
+ -- skip
+ -- elseif name:find("[%~%`%!%#%$%%%^%&%*%(%)%=%{%}%[%]%:%;\"\'%|%<%>%,%?\n\r\t]") then -- too much escaped
+ elseif weird:match(name) then
+ -- texio.write_nl("skipping " .. name)
+ -- skip
+ else
+ mode = attributes(full..name,'mode')
+ if mode == "directory" then
+ m = m + 1
+ if path then
+ action(path..'/'..name)
+ else
+ action(name)
end
- else
- files[name] = path
- local lower = name:lower()
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
+ elseif path and mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if not small then
+ if type(f) == 'string' then
+ files[name] = { f, path }
+ else
+ f[#f+1] = path
+ end
+ end
+ else
+ files[name] = path
+ local lower = name:lower()
+ if name ~= lower then
+ files["remap:"..lower] = name
+ r = r + 1
+ end
end
end
end
end
end
- end
- action()
- input.report(string.format("%s files found on %s directories with %s uppercase remappings",n,m,r))
- else
- local fullname = file.join(specification,input.lsrname)
- local path = '.'
- local f = io.open(fullname)
- if f then
- instance.files[tag] = { }
- local files = instance.files[tag]
- local small = instance.smallcache
- input.report("loading lsr file",fullname)
- -- for line in f:lines() do -- much slower then the next one
- for line in (f:read("*a")):gmatch("(.-)\n") do
- if line:find("^[%a%d]") then
- local fl = files[line]
- if fl then
- if not small then
- if type(fl) == 'string' then
- files[line] = { fl, path } -- table
- else
- fl[#fl+1] = path
+ action()
+ input.report(string.format("%s files found on %s directories with %s uppercase remappings",n,m,r))
+ else
+ local fullname = file.join(specification,input.lsrname)
+ local path = '.'
+ local f = io.open(fullname)
+ if f then
+ instance.files[tag] = { }
+ local files = instance.files[tag]
+ local small = instance.smallcache
+ input.report("loading lsr file",fullname)
+ -- for line in f:lines() do -- much slower than the next one
+ for line in (f:read("*a")):gmatch("(.-)\n") do
+ if line:find("^[%a%d]") then
+ local fl = files[line]
+ if fl then
+ if not small then
+ if type(fl) == 'string' then
+ files[line] = { fl, path } -- table
+ else
+ fl[#fl+1] = path
+ end
+ end
+ else
+ files[line] = path -- string
+ local lower = line:lower()
+ if line ~= lower then
+ files["remap:"..lower] = line
end
end
else
- files[line] = path -- string
- local lower = line:lower()
- if line ~= lower then
- files["remap:"..lower] = line
- end
+ path = line:match("%.%/(.-)%:$") or path -- match could be nil due to empty line
end
- else
- path = line:match("%.%/(.-)%:$") or path -- match could be nil due to empty line
end
+ f:close()
end
- f:close()
end
end
+
end
-- savers, todo
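Illustration only (not part of the patch) of how the new weird matcher replaces the heavily escaped string pattern; lpeg.anywhere comes from l-lpeg and the filenames are made up:

    local weird = lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
    print(weird:match("texnansi-lmr10.tfm"))    -- nil, so the file gets indexed
    print(weird:match("copy (2) of file.tex"))  -- non-nil, so the name is skipped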
@@ -1109,10 +1122,168 @@ end
-- a,b,c/{p,q,r}/d/{x,y,z}//
-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+
+-- this one is better and faster, but it took me a while to realize
+-- that this kind of replacement is cleaner than messy parsing and
+-- fuzzy concatenating we can probably gain a bit with selectively
+-- applying lpeg, but experiments with lpeg parsing this proved not to
+-- work that well; the parsing is ok, but dealing with the resulting
+-- table is a pain because we need to work inside-out recursively
+
+--~ function input.aux.splitpathexpr(str, t, validate)
+--~ -- no need for optimization, only called a few times, we can use lpeg for the sub
+--~ t = t or { }
+--~ while true do
+--~ local done = false
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("([^{},]+){([^{}]-)}", function(a,b)
+--~ local t = { }
+--~ for s in b:gmatch("([^,]+)") do
+--~ t[#t+1] = a .. s
+--~ end
+--~ ok, done = true, true
+--~ return "{" .. table.concat(t,",") .. "}"
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("{([^{}]-)}([^{},]+)", function(a,b)
+--~ local t = { }
+--~ for s in a:gmatch("([^,]+)") do
+--~ t[#t+1] = s .. b
+--~ end
+--~ ok, done = true, true
+--~ return "{" .. table.concat(t,",") .. "}"
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("([,{]){([^{}]+)}([,}])", function(a,b,c)
+--~ ok, done = true, true
+--~ return a .. b .. c
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ if not done then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("{([^{}]-)}{([^{}]-)}", function(a,b)
+--~ local t = { }
+--~ for sa in a:gmatch("([^,]+)") do
+--~ for sb in b:gmatch("([^,]+)") do
+--~ t[#t+1] = sa .. sb
+--~ end
+--~ end
+--~ ok = true
+--~ return "{" .. table.concat(t,",") .. "}"
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("{([^{}]-)}", function(a)
+--~ ok = true
+--~ return a
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ if validate then
+--~ for s in str:gmatch("([^,]+)") do
+--~ s = validate(s)
+--~ if s then t[#t+1] = s end
+--~ end
+--~ else
+--~ for s in str:gmatch("([^,]+)") do
+--~ t[#t+1] = s
+--~ end
+--~ end
+--~ return t
+--~ end
+
+function input.aux.splitpathexpr(str, t, validate)
+ -- no need for optimization, only called a few times, we can use lpeg for the sub
+ t = t or { }
+ local concat = table.concat
+ while true do
+ local done = false
+ while true do
+ ok = false
+ str = str:gsub("([^{},]+){([^{}]-)}", function(a,b)
+ local t = { }
+ b:piecewise(",", function(s) t[#t+1] = a .. s end)
+ ok, done = true, true
+ return "{" .. concat(t,",") .. "}"
+ end)
+ if not ok then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("{([^{}]-)}([^{},]+)", function(a,b)
+ local t = { }
+ a:piecewise(",", function(s) t[#t+1] = s .. b end)
+ ok, done = true, true
+ return "{" .. concat(t,",") .. "}"
+ end)
+ if not ok then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("([,{]){([^{}]+)}([,}])", function(a,b,c)
+ ok, done = true, true
+ return a .. b .. c
+ end)
+ if not ok then break end
+ end
+ if not done then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("{([^{}]-)}{([^{}]-)}", function(a,b)
+ local t = { }
+ a:piecewise(",", function(sa)
+ b:piecewise(",", function(sb)
+ t[#t+1] = sa .. sb
+ end)
+ end)
+ ok = true
+ return "{" .. concat(t,",") .. "}"
+ end)
+ if not ok then break end
+ end
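For the record (the local package pattern is not visible outside the do block, so this is illustration only): each character of the fshow string ends up as a grouped \c call, which the TeX side resolves via \let\c\char:

    print(package:match("Hi"))      -- {\c72}{\c105}
    print(package:match("\\101x"))  -- {\c65}{\c120}   (octal 101 is decimal 65)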
+ while true do
+ ok = false
+ str = str:gsub("{([^{}]-)}", function(a)
+ ok = true
+ return a
+ end)
+ if not ok then break end
+ end
+ if validate then
+ str:piecewise(",", function(s)
+ s = validate(s)
+ if s then t[#t+1] = s end
+ end)
+ else
+ str:piecewise(",", function(s)
+ t[#t+1] = s
+ end)
+ end
+ return t
+end
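A quick sanity check of the expander (illustration only; it relies on the string.piecewise helper from l-string being loaded):

    local t = input.aux.splitpathexpr("a{b,c}{d,e}f")
    print(table.concat(t," "))  -- abdf abef acdf acef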
function input.aux.expanded_path(instance,pathlist)
-- a previous version fed back into pathlist
- local i, n, oldlist, newlist, ok = 0, 0, { }, { }, false
+ local newlist, ok = { }, false
for _,v in ipairs(pathlist) do
if v:find("[{}]") then
ok = true
@@ -1120,45 +1291,11 @@ function input.aux.expanded_path(instance,pathlist)
end
end
if ok then
- for _,v in ipairs(pathlist) do
- oldlist[#oldlist+1] = (v:gsub("([\{\}])", function(p)
- if p == "{" then
- i = i + 1
- if i > n then n = i end
- return "<" .. (i-1) .. ">"
- else
- i = i - 1
- return "</" .. i .. ">"
- end
- end))
- end
- for i=1,n do
- while true do
- local more = false
- local pattern = "^(.-)<"..(n-i)..">(.-)</"..(n-i)..">(.-)$"
- local t = { }
- for _,v in ipairs(oldlist) do
- local pre, mid, post = v:match(pattern)
- if pre and mid and post then
- more = true
- for vv in string.gmatch(mid..',',"(.-),") do
- if vv == '.' then
- t[#t+1] = pre..post
- else
- t[#t+1] = pre..vv..post
- end
- end
- else
- t[#t+1] = v
- end
- end
- oldlist = t
- if not more then break end
- end
- end
- for _,v in ipairs(oldlist) do
- v = file.collapse_path(v)
- if v ~= "" and not v:find(instance.dummy_path_expr) then newlist[#newlist+1] = v end
+ for _, v in ipairs(pathlist) do
+ input.aux.splitpathexpr(v, newlist, function(s)
+ s = file.collapse_path(s)
+ return s ~= "" and not s:find(instance.dummy_path_expr) and s
+ end)
end
else
for _,v in ipairs(pathlist) do
@@ -1171,6 +1308,83 @@ function input.aux.expanded_path(instance,pathlist)
return newlist
end
+--~ old one, imperfect and not that efficient
+--~
+--~ function input.aux.expanded_path(instance,pathlist)
+--~ -- a previous version fed back into pathlist
+--~ local i, n, oldlist, newlist, ok = 0, 0, { }, { }, false
+--~ for _,v in ipairs(pathlist) do
+--~ if v:find("[{}]") then
+--~ ok = true
+--~ break
+--~ end
+--~ end
+--~ if ok then
+--~ for _,v in ipairs(pathlist) do
+--~ oldlist[#oldlist+1] = (v:gsub("([\{\}])", function(p)
+--~ if p == "{" then
+--~ i = i + 1
+--~ if i > n then n = i end
+--~ return "<" .. (i-1) .. ">"
+--~ else
+--~ i = i - 1
+--~ return "</" .. i .. ">"
+--~ end
+--~ end))
+--~ end
+--~ for i=1,n do
+--~ while true do
+--~ local more = false
+--~ local pattern = "^(.-)<"..(n-i)..">(.-)</"..(n-i)..">(.-)$"
+--~ local t = { }
+--~ for _,v in ipairs(oldlist) do
+--~ local pre, mid, post = v:match(pattern)
+--~ if pre and mid and post then
+--~ more = true
+--~ for vv in string.gmatch(mid..',',"(.-),") do -- (mid, "([^,]+)")
+--~ if vv == '.' then
+--~ t[#t+1] = pre..post
+--~ else
+--~ t[#t+1] = pre..vv..post
+--~ end
+--~ end
+--~ else
+--~ t[#t+1] = v
+--~ end
+--~ end
+--~ oldlist = t
+--~ if not more then break end
+--~ end
+--~ end
+--~ if true then
+--~ -- many dups are possible due to messy resolve / order can be messed up too, brr !
+--~ local ok = { }
+--~ for _,o in ipairs(oldlist) do
+--~ for v in o:gmatch("([^,]+)") do
+--~ if not ok[v] then
+--~ ok[v] = true
+--~ v = file.collapse_path(v)
+--~ if v ~= "" and not v:find(instance.dummy_path_expr) then newlist[#newlist+1] = v end
+--~ end
+--~ end
+--~ end
+--~ else
+--~ for _,v in ipairs(oldlist) do
+--~ v = file.collapse_path(v)
+--~ if v ~= "" and not v:find(instance.dummy_path_expr) then newlist[#newlist+1] = v end
+--~ end
+--~ end
+--~ else
+--~ for _,v in ipairs(pathlist) do
+--~ for vv in string.gmatch(v..',',"(.-),") do
+--~ vv = file.collapse_path(v)
+--~ if vv ~= "" then newlist[#newlist+1] = vv end
+--~ end
+--~ end
+--~ end
+--~ return newlist
+--~ end
+
--~ function input.is_readable(name) -- brrr, get rid of this
--~ return name:find("^zip##") or file.is_readable(name)
--~ end
@@ -1269,24 +1483,51 @@ function input.suffixes_of_format(str)
end
end
-function input.aux.qualified_path(filename) -- make platform dependent / not good yet
- return
- filename:find("^%.+/") or
- filename:find("^/") or
- filename:find("^%a+%:") or
- filename:find("^%a+##")
-end
+--~ function input.aux.qualified_path(filename) -- make platform dependent / not good yet
+--~ return
+--~ filename:find("^%.+/") or
+--~ filename:find("^/") or
+--~ filename:find("^%a+%:") or
+--~ filename:find("^%a+##")
+--~ end
+
+--~ function input.normalize_name(original)
+--~ -- internally we use type##spec##subspec ; this hackery slightly slows down searching
+--~ local str = original or ""
+--~ str = str:gsub("::", "##") -- :: -> ##
+--~ str = str:gsub("^(%a+)://" ,"%1##") -- zip:// -> zip##
+--~ str = str:gsub("(.+)##(.+)##/(.+)","%1##%2##%3") -- ##/spec -> ##spec
+--~ if (input.trace>1) and (original ~= str) then
+--~ input.logger('= normalizer',original.." -> "..str)
+--~ end
+--~ return str
+--~ end
+
+do -- called about 700 times for an empty doc (font initializations etc)
+ -- I need to weed the font files for redundant calls
-function input.normalize_name(original)
- -- internally we use type##spec##subspec ; this hackery slightly slows down searching
- local str = original or ""
- str = str:gsub("::", "##") -- :: -> ##
- str = str:gsub("^(%a+)://" ,"%1##") -- zip:// -> zip##
- str = str:gsub("(.+)##(.+)##/(.+)","%1##%2##%3") -- ##/spec -> ##spec
- if (input.trace>1) and (original ~= str) then
- input.logger('= normalizer',original.." -> "..str)
+ local letter = lpeg.R("az","AZ")
+ local separator = lpeg.P("##")
+
+ local qualified = lpeg.P(".")^0 * lpeg.P("/") + letter*lpeg.P(":") + letter^1*separator
+ local normalized = lpeg.Cs(
+ (letter^1*(lpeg.P("://")/"##") * (1-lpeg.P(false))^1) +
+ (lpeg.P("::")/"##" + (1-separator)^1*separator*(1-separator)^1*separator*(lpeg.P("/")/"") + 1)^0
+ )
+
+ -- ./name ../name /name c: zip## (todo: use url internally and get rid of ##)
+ function input.aux.qualified_path(filename)
+ return qualified:match(filename)
+ end
+
+ -- zip:// -> zip## ; :: -> ## ; aa##bb##/cc -> aa##bb##cc
+ function input.normalize_name(original)
+ local str = normalized:match(original or "")
+ if input.trace > 1 and original ~= str then
+ input.logger('= normalizer',original.." -> "..str)
+ end
+ return str
end
- return str
end
-- split the next one up, better for jit
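Expected behaviour of the lpeg based replacements, matching the comments above (illustration only, made-up names):

    print(input.aux.qualified_path("./local/file.tex")) -- truthy: starts with ./
    print(input.normalize_name("zip://foo.zip"))        -- zip##foo.zip
    print(input.normalize_name("zip##foo.zip##/bar"))   -- zip##foo.zip##bar
    print(input.normalize_name("texmf::somefile"))      -- texmf##somefile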
@@ -1651,13 +1892,13 @@ function input.automount(instance)
end
function input.load(instance)
- input.start_timing(instance)
+ input.starttiming(instance)
input.identify_cnf(instance)
input.load_cnf(instance)
input.expand_variables(instance)
input.load_hash(instance)
input.automount(instance)
- input.stop_timing(instance)
+ input.stoptiming(instance)
end
function input.for_files(instance, command, files, filetype, mustexist)
diff --git a/tex/context/base/luat-lib.tex b/tex/context/base/luat-lib.tex
index 84b5bcfff..d557ca4b6 100644
--- a/tex/context/base/luat-lib.tex
+++ b/tex/context/base/luat-lib.tex
@@ -35,6 +35,7 @@
% this will change once we have proper write overloads
\registerctxluafile{l-string} {1.001}
+\registerctxluafile{l-lpeg} {1.001}
\registerctxluafile{l-boolean}{1.001}
\registerctxluafile{l-number} {1.001}
\registerctxluafile{l-math} {1.001}
@@ -48,6 +49,7 @@
\registerctxluafile{l-unicode}{1.001}
\registerctxluafile{l-utils} {1.001}
\registerctxluafile{l-tex} {1.001}
+\registerctxluafile{l-url} {1.001}
\registerctxluafile{l-xml} {1.001}
%registerctxluafile{l-xmlctx} {1.001}
diff --git a/tex/context/base/luat-lmx.lua b/tex/context/base/luat-lmx.lua
index 672db3ca6..75a7098c1 100644
--- a/tex/context/base/luat-lmx.lua
+++ b/tex/context/base/luat-lmx.lua
@@ -32,7 +32,7 @@ end
lmx.converting = false
-function lmx.convert(template,result) -- use lpeg instead
+function lmx.convert(template,result) -- todo: use lpeg instead
if not lmx.converting then -- else, if error then again tex error and loop
local data = input.texdatablob(texmf.instance, template)
local f = false
@@ -46,7 +46,7 @@ function lmx.convert(template,result) -- use lpeg instead
return lmx.variables[str] or ""
end
function lmx.escape(str)
- return string.gsub(string.gsub(str,'&','&amp;'),'[<>"]',lmx.escapes)
+ return string.gsub(str:gsub('&','&amp;'),'[<>"]',lmx.escapes)
end
function lmx.type(str)
if str then lmx.print("<tt>" .. lmx.escape(str) .. "</tt>") end
@@ -57,18 +57,18 @@ function lmx.convert(template,result) -- use lpeg instead
function lmx.tv(str)
lmx.type(lmx.variable(str))
end
- data = string.gsub(data, "<%?lmx%-include%s+(.-)%s-%?>", function(filename)
+ data = data:gsub("<%?lmx%-include%s+(.-)%s-%?>", function(filename)
return lmx.loadedfile(filename)
end)
local definitions = { }
- data = string.gsub(data, "<%?lmx%-define%-begin%s+(%S-)%s-%?>(.-)<%?lmx%-define%-end%s-%?>", function(tag,content)
+ data = data:gsub("<%?lmx%-define%-begin%s+(%S-)%s-%?>(.-)<%?lmx%-define%-end%s-%?>", function(tag,content)
definitions[tag] = content
return ""
end)
- data = string.gsub(data, "<%?lmx%-resolve%s+(%S-)%s-%?>", function(tag)
+ data = data:gsub("<%?lmx%-resolve%s+(%S-)%s-%?>", function(tag)
return definitions[tag] or ""
end)
- data = string.gsub(data, "%c%s-(<%?lua .-%?>)%s-%c", function(lua)
+ data = data:gsub("%c%s-(<%?lua .-%?>)%s-%c", function(lua)
return "\n" .. lua .. " "
end)
data = string.gsub(data .. "<?lua ?>","(.-)<%?lua%s+(.-)%?>", function(txt, lua)
diff --git a/tex/context/base/luat-log.lua b/tex/context/base/luat-log.lua
index faecf7e29..12cf45c48 100644
--- a/tex/context/base/luat-log.lua
+++ b/tex/context/base/luat-log.lua
@@ -45,7 +45,12 @@ logs.tex = logs.tex or { }
logs.level = 0
do
- local write_nl, write, format = texio.write_nl or print, texio.write or print, string.format
+ local write_nl, write, format = texio.write_nl or print, texio.write or io.write, string.format
+
+ if texlua then
+ write_nl = print
+ write = io.write
+ end
function logs.xml.debug(category,str)
if logs.level > 3 then write_nl(format("<d category='%s'>%s</d>",category,str)) end
diff --git a/tex/context/base/luat-tex.lua b/tex/context/base/luat-tex.lua
index 9127ab9da..591f3af20 100644
--- a/tex/context/base/luat-tex.lua
+++ b/tex/context/base/luat-tex.lua
@@ -92,18 +92,22 @@ if texconfig and not texlua then
else
input.logger('+ ' .. tag .. ' opener',filename)
-- todo: file;name -> freeze / scan first line -> freeze
+ local filters = input.filters
t = {
reader = function(self)
local line = file_handle:read()
if line == "" then
return ""
- elseif input.filters.utf_translator then
- return input.filters.utf_translator(line)
- elseif input.filters.dynamic_translator then
- return input.filters.dynamic_translator(line)
- else
- return line
end
+ local translator = filters.utf_translator
+ if translator then
+ return translator(line)
+ end
+ translator = filters.dynamic_translator
+ if translator then
+ return translator(line)
+ end
+ return line
end,
close = function()
input.logger('= ' .. tag .. ' closer',filename)
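A toy translator hook, to show what the restructured reader consults (upper-casing is just for illustration; real translators remap input encodings):

    input.filters.utf_translator = function(line)
        return line:upper()
    end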
@@ -254,8 +258,8 @@ if texconfig and not texlua then
function input.register_start_actions(f) table.insert(input.start_actions, f) end
function input.register_stop_actions (f) table.insert(input.stop_actions, f) end
---~ callback.register('start_run', function() for _, a in pairs(input.start_actions) do a() end end)
---~ callback.register('stop_run' , function() for _, a in pairs(input.stop_actions ) do a() end end)
+ --~ callback.register('start_run', function() for _, a in pairs(input.start_actions) do a() end end)
+ --~ callback.register('stop_run' , function() for _, a in pairs(input.stop_actions ) do a() end end)
end
@@ -397,3 +401,100 @@ function cs.testcase(b)
tex.sprint(tex.texcatcodes, "\\secondoftwoarguments")
end
end
+
+-- This is not the ideal place, but it will do. Maybe we need to move
+-- attributes to node-att.lua.
+
+if node then
+
+ nodes = nodes or { }
+
+ do
+
+ -- just for testing
+
+ local reserved = { }
+
+ function nodes.register(n)
+ reserved[#reserved+1] = n
+ end
+
+ function nodes.cleanup_reserved(nofboxes) -- todo
+ local nr, free = #reserved, node.free
+ for i=1,nr do
+ free(reserved[i])
+ end
+ local nl, tb, flush = 0, tex.box, node.flush_list
+ if nofboxes then
+ for i=1,nofboxes do
+ local l = tb[i]
+ if l then
+ flush(l)
+ tb[i] = nil
+ nl = nl + 1
+ end
+ end
+ end
+ reserved = { }
+ return nr, nl, nofboxes
+ end
+
+ -- nodes.register = function() end
+ -- nodes.cleanup_reserved = function() end
+
+ end
+
+ do
+
+ local pdfliteral = node.new("whatsit",8) pdfliteral.next, pdfliteral.prev = nil, nil pdfliteral.mode = 1
+ local disc = node.new("disc") disc.next, disc.prev = nil, nil
+ local kern = node.new("kern",1) kern.next, kern.prev = nil, nil
+ local penalty = node.new("penalty") penalty.next, penalty.prev = nil, nil
+ local glue = node.new("glue") glue.next, glue.prev = nil, nil
+ local glue_spec = node.new("glue_spec") glue_spec.next, glue_spec.prev = nil, nil
+
+ nodes.register(pdfliteral)
+ nodes.register(disc)
+ nodes.register(kern)
+ nodes.register(penalty)
+ nodes.register(glue)
+ nodes.register(glue_spec)
+
+ local copy = node.copy
+
+ function nodes.penalty(p)
+ local n = copy(penalty)
+ n.penalty = p
+ return n
+ end
+ function nodes.kern(k)
+ local n = copy(kern)
+ n.kern = k
+ return n
+ end
+ function nodes.glue(width,stretch,shrink)
+ local n = copy(glue)
+ local s = copy(glue_spec)
+ s.width, s.stretch, s.shrink = width, stretch, shrink
+ n.spec = s
+ return n
+ end
+ function nodes.glue_spec(width,stretch,shrink)
+ local s = copy(glue_spec)
+ s.width, s.stretch, s.shrink = width, stretch, shrink
+ return s
+ end
+
+ function nodes.disc()
+ return copy(disc)
+ end
+
+ function nodes.pdfliteral(str)
+ local t = copy(pdfliteral)
+ t.data = str
+ return t
+ end
+
+ end
+
+end
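A hypothetical MkIV-side use of these helpers (values in scaled points); the copies come from the reserved prototypes registered above, and the list is flushed again so nothing leaks:

    if node then
        local k = nodes.kern(65536)            -- a 1pt kern
        local p = nodes.penalty(10000)         -- an infinite penalty
        local g = nodes.glue(2*65536,65536,0)  -- 2pt plus 1pt
        k.next, p.next = p, g
        -- ... inject the list into a box or callback here ...
        node.flush_list(k)                     -- free the whole chain when done
    end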
diff --git a/tex/context/base/luat-tmp.lua b/tex/context/base/luat-tmp.lua
index 58a195986..bd29f87f4 100644
--- a/tex/context/base/luat-tmp.lua
+++ b/tex/context/base/luat-tmp.lua
@@ -22,7 +22,7 @@ being written at the same time is small. We also need to extend
luatools with a recache feature.</p>
--ldx]]--
-caches = caches or { }
+caches = caches or { }
dir = dir or { }
texmf = texmf or { }
@@ -34,9 +34,20 @@ caches.trace = false
caches.tree = false
caches.temp = caches.temp or os.getenv("TEXMFCACHE") or os.getenv("HOME") or os.getenv("HOMEPATH") or os.getenv("VARTEXMF") or os.getenv("TEXMFVAR") or os.getenv("TMP") or os.getenv("TEMP") or os.getenv("TMPDIR") or nil
caches.paths = caches.paths or { caches.temp }
+caches.force = false
+input.usecache = not toboolean(os.getenv("TEXMFSHARECACHE") or "false",true) -- true
+
+if caches.temp and caches.temp ~= "" and lfs.attributes(caches.temp,"mode") ~= "directory" then
+ if caches.force or io.ask(string.format("Should I create the cache path %s?",caches.temp), "no", { "yes", "no" }) == "yes" then
+ lfs.mkdirs(caches.temp)
+ end
+end
if not caches.temp or caches.temp == "" then
- print("\nFATAL ERROR: NO VALID TEMPORARY PATH\n")
+ print("\nfatal error: there is no valid cache path defined\n")
+ os.exit()
+elseif lfs.attributes(caches.temp,"mode") ~= "directory" then
+ print(string.format("\nfatal error: cache path %s is not a directory\n",caches.temp))
os.exit()
end
@@ -223,8 +234,6 @@ end
-- since we want to use the cache instead of the tree, we will now
-- reimplement the saver.
-input.usecache = true
-
function input.aux.save_data(instance, dataname, check)
for cachename, files in pairs(instance[dataname]) do
local name
@@ -420,8 +429,8 @@ end
function input.storage.dump()
for name, data in ipairs(input.storage.data) do
local evaluate, message, original, target = data[1], data[2], data[3] ,data[4]
- local name, initialize, finalize = nil, "", ""
- for str in string.gmatch(target,"([^%.]+)") do
+ local name, initialize, finalize, code = nil, "", "", ""
+ for str in target:gmatch("([^%.]+)") do
if name then
name = name .. "." .. str
else
@@ -435,15 +444,15 @@ function input.storage.dump()
input.storage.max = input.storage.max + 1
if input.storage.trace then
logs.report('storage',string.format('saving %s in slot %s',message,input.storage.max))
- lua.bytecode[input.storage.max] = loadstring(
+ code =
initialize ..
string.format("logs.report('storage','restoring %s from slot %s') ",message,input.storage.max) ..
table.serialize(original,name) ..
finalize
- )
else
- lua.bytecode[input.storage.max] = loadstring(initialize .. table.serialize(original,name) .. finalize)
+ code = initialize .. table.serialize(original,name) .. finalize
end
+ lua.bytecode[input.storage.max] = loadstring(code)
end
end
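The refactoring above just builds the code string once and feeds it to a single loadstring; the underlying idea, sketched with made-up names (slot 501 and the table are hypothetical):

    local original = { answer = 42 }
    local code = table.serialize(original,"mystorage") -- luatools serializer from l-table
    lua.bytecode[501] = loadstring(code)               -- executed again when the format is loaded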
diff --git a/tex/context/base/luat-tra.lua b/tex/context/base/luat-tra.lua
index f5c077f41..7fc973195 100644
--- a/tex/context/base/luat-tra.lua
+++ b/tex/context/base/luat-tra.lua
@@ -20,8 +20,8 @@ do
if counters[f] == nil then
counters[f] = 1
--~ names[f] = debug.getinfo(2,"Sn")
---~ names[f] = debug.getinfo(2,"n")
- names[f] = debug.getinfo(f)
+ names[f] = debug.getinfo(2,"n")
+--~ names[f] = debug.getinfo(f)
else
counters[f] = counters[f] + 1
end
@@ -51,8 +51,11 @@ do
printer("\n") -- ugly but ok
for func, count in pairs(counters) do
if count > threshold then
- printer(string.format("%8i %s\n", count, getname(func)))
- total = total + count
+ local name = getname(func)
+ if name ~= "(for generator)" then
+ printer(string.format("%8i %s\n", count, getname(func)))
+ total = total + count
+ end
end
grandtotal = grandtotal + count
functions = functions + 1
@@ -78,7 +81,12 @@ do
end
function debugger.tracing()
- return tonumber((os.env['MTX.TRACE.CALLS'] or os.env['MTX_TRACE_CALLS'] or 0)) > 0
+ local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0
+ if n > 0 then
+ function debugger.tracing() return true end ; return true
+ else
+ function debugger.tracing() return false end ; return false
+ end
end
end
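The new tracing function uses a redefine-on-first-call trick so later calls cost nothing; the same idea in isolation (the environment variable name here is just for illustration):

    function mytracing()
        local n = tonumber(os.getenv("MY_TRACE_CALLS")) or 0
        if n > 0 then
            function mytracing() return true  end ; return true
        else
            function mytracing() return false end ; return false
        end
    end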
diff --git a/tex/context/base/luat-zip.lua b/tex/context/base/luat-zip.lua
index fbf00a47e..c5a63aeed 100644
--- a/tex/context/base/luat-zip.lua
+++ b/tex/context/base/luat-zip.lua
@@ -164,7 +164,7 @@ else
function input.registerzipfile(instance,zipname,tag)
if not zip.registeredfiles[zipname] then
- input.start_timing(instance)
+ input.starttiming(instance)
local z = zip.open(zipname)
if not z then
zipname = input.find_file(instance,zipname)
@@ -177,7 +177,7 @@ else
else
input.logger("? zipfile","unknown "..zipname)
end
- input.stop_timing(instance)
+ input.stoptiming(instance)
end
end
diff --git a/tex/context/base/lxml-ini.lua b/tex/context/base/lxml-ini.lua
index 67e1bc75b..69d828d3d 100644
--- a/tex/context/base/lxml-ini.lua
+++ b/tex/context/base/lxml-ini.lua
@@ -113,8 +113,8 @@ do
function lxml.verbatim(id,before,after)
local root = lxml.id(id)
- if before then tex.sprint(tex.ctxcatcodes,string.format("%s[%s]",before,id.tg)) end
- xml.serialize(root,toverbatim,nil,nil,nil,true)
+ if before then tex.sprint(tex.ctxcatcodes,string.format("%s[%s]",before,root.tg)) end
+ xml.serialize(root.dt,toverbatim,nil,nil,nil,true) -- was root
if after then tex.sprint(tex.ctxcatcodes,after) end
end
function lxml.inlineverbatim(id)
@@ -136,19 +136,36 @@ function lxml.root(id)
return lxml.loaded[id]
end
+-- redefine xml load
+
+xml.originalload = xml.load
+
+function xml.load(filename)
+ input.starttiming(lxml)
+ local x = xml.originalload(filename)
+ input.stoptiming(lxml)
+ return x
+end
+
+function lxml.filename(filename) -- some day we will do this in input, first figure out /
+ return input.find_file(texmf.instance,url.filename(filename)) or ""
+end
+
function lxml.load(id,filename)
- input.start_timing(lxml)
if texmf then
- local fullname = input.find_file(texmf.instance,filename) or ""
+ local fullname = lxml.filename(filename)
if fullname ~= "" then
filename = fullname
end
end
lxml.loaded[id] = xml.load(filename)
- input.stop_timing(lxml)
return lxml.loaded[id], filename
end
+function lxml.include(id,pattern,attribute,recurse)
+ xml.include(lxml.id(id),pattern,attribute,recurse,lxml.filename)
+end
+
function lxml.utfize(id)
xml.utfize(lxml.id(id))
end
@@ -199,7 +216,8 @@ function lxml.index(id,pattern,i)
end
function lxml.attribute(id,pattern,a,default) --todo: snelle xmlatt
- tex.sprint((xml.filters.attribute(lxml.id(id),pattern,a)) or default or "")
+ local str = xml.filters.attribute(lxml.id(id),pattern,a) or ""
+ tex.sprint((str == "" and default) or str)
end
function lxml.count(id,pattern)
@@ -217,7 +235,8 @@ function lxml.tag(id)
tex.sprint(lxml.id(id).tg or "")
end
function lxml.namespace(id) -- or remapped name?
- tex.sprint(lxml.id(id).ns or "")
+ local root = lxml.id(id)
+ tex.sprint(root.rn or root.ns or "")
end
--~ function lxml.concat(id,what,separator,lastseparator)
@@ -264,6 +283,7 @@ end
lxml.trace_setups = false
function lxml.setsetup(id,pattern,setup)
+ local trace = lxml.trace_setups
if not setup or setup == "" or setup == "*" then
for rt, dt, dk in xml.elements(lxml.id(id),pattern) do
local dtdk = dt and dt[dk] or rt
@@ -273,13 +293,25 @@ function lxml.setsetup(id,pattern,setup)
else
dtdk.command = ns .. ":" .. tg
end
- if lxml.trace_setups then
- texio.write_nl(string.format("xml setup: namespace=%s, tag=%s, setup=%s",ns, tg, dtdk.command))
+ if trace then
+ texio.write_nl(string.format("lpath matched -> %s -> %s", dtdk.command, dtdk.command))
end
end
else
+ if trace then
+ texio.write_nl(string.format("lpath pattern -> %s -> %s", pattern, setup))
+ end
for rt, dt, dk in xml.elements(lxml.id(id),pattern) do
- ((dt and dt[dk]) or rt).command = setup
+ local dtdk = (dt and dt[dk]) or rt
+ dtdk.command = setup
+ if trace then
+ local ns, tg = dtdk.rn or dtdk.ns, dtdk.tg
+ if ns == "" then
+ texio.write_nl(string.format("lpath matched -> %s -> %s", tg, setup))
+ else
+ texio.write_nl(string.format("lpath matched -> %s:%s -> %s", ns, tg, setup))
+ end
+ end
end
end
end
@@ -312,7 +344,7 @@ do
traverse(root, lpath(pattern), function(r,d,k)
-- this can become pretty large
local n = #lxml.self + 1
- lxml.self[n] = d[k]
+ lxml.self[n] = (d and d[k]) or r
tex.sprint(tex.ctxcatcodes,string.format("\\xmlsetup{%s}{%s}",n,command))
end)
end
@@ -424,9 +456,9 @@ function xml.getbuffer(name) -- we need to make sure that commands are processed
end
function lxml.loadbuffer(id,name)
- input.start_timing(lxml)
+ input.starttiming(lxml)
lxml.loaded[id] = xml.convert(table.join(buffers.data[name or id] or {},""))
- input.stop_timing(lxml)
+ input.stoptiming(lxml)
return lxml.loaded[id], name or id
end
diff --git a/tex/context/base/lxml-ini.tex b/tex/context/base/lxml-ini.tex
index 0df2d302f..8f25a2d4a 100644
--- a/tex/context/base/lxml-ini.tex
+++ b/tex/context/base/lxml-ini.tex
@@ -17,38 +17,49 @@
\unprotect
-\def\xmlload #1#2{\ctxlua{lxml.load("#1","#2")}}
-\def\xmlloadbuffer #1#2{\ctxlua{lxml.loadbuffer("#1","#2")}}
-\def\xmlutfize #1{\ctxlua{lxml.utfize("#1")}}
-\def\xmlfirst #1#2{\ctxlua{lxml.first("#1","#2")}}
-\def\xmllast #1#2{\ctxlua{lxml.last("#1","#2")}}
-\def\xmlall #1#2{\ctxlua{lxml.all("#1","#2")}}
-\def\xmlfilter #1#2{\ctxlua{lxml.filter("#1","#2")}}
-\def\xmlcommand #1#2#3{\ctxlua{lxml.command("#1","#2","#3")}}
-\def\xmlnonspace #1#2{\ctxlua{lxml.nonspace("#1","#2")}}
-\def\xmltext #1#2{\ctxlua{lxml.text("#1","#2")}}
-\def\xmlcontent #1#2{\ctxlua{lxml.content("#1","#2")}}
-\def\xmlstripped #1#2{\ctxlua{lxml.stripped("#1","#2")}}
-\def\xmlstrip #1#2{\ctxlua{lxml.strip("#1","#2")}}
-\def\xmlflush #1{\ctxlua{lxml.flush("#1")}}
-\def\xmlindex #1#2#3{\ctxlua{lxml.index("#1","#2",\number#3)}}
-\def\xmlidx #1#2#3{\ctxlua{lxml.idx("#1","#2",\number#3)}}
-\def\xmlcount #1#2{\ctxlua{lxml.count("#1","#2")}}
-\def\xmlname #1{\ctxlua{lxml.name("#1")}}
-\def\xmlnamespace #1{\ctxlua{lxml.namespace("#1")}}
-\def\xmltag #1{\ctxlua{lxml.tag("#1")}}
-\def\xmlattribute #1#2#3{\ctxlua{lxml.attribute("#1","#2","#3")}}
-\def\xmlatt #1#2{\ctxlua{lxml.attribute("#1","/","#2")}}
-\def\xmlattdef #1#2#3{\ctxlua{lxml.attribute("#1","/","#2","#3")}}
-\def\xmlconcat #1#2#3{\ctxlua{lxml.concat("#1","#2",[[\detokenize{#3}]])}}
-\def\xmlsetsetup #1#2#3{\ctxlua{lxml.setsetup("#1","#2","#3")}}
-\def\xmlsetfunction#1#2#3{\ctxlua{lxml.setaction("#1","#2",#3)}}
-\def\xmlloaddirectives #1{\ctxlua{lxml.directives.load("#1")}}
-\def\xmldirectives #1{\ctxlua{lxml.directives.setups("#1")}}
-
-\def\xmlregisterns #1#2{\ctxlua{xml.registerns("#1","#2")}} % document
+\def\xmlload #1#2{\ctxlua{lxml.load("#1","#2")}}
+\def\xmlloadbuffer #1#2{\ctxlua{lxml.loadbuffer("#1","#2")}}
+\def\xmlutfize #1{\ctxlua{lxml.utfize("#1")}}
+\def\xmlfirst #1#2{\ctxlua{lxml.first("#1","#2")}}
+\def\xmllast #1#2{\ctxlua{lxml.last("#1","#2")}}
+\def\xmlall #1#2{\ctxlua{lxml.all("#1","#2")}}
+\def\xmlfilter #1#2{\ctxlua{lxml.filter("#1","#2")}}
+\def\xmlcommand #1#2#3{\ctxlua{lxml.command("#1","#2","#3")}}
+\def\xmlnonspace #1#2{\ctxlua{lxml.nonspace("#1","#2")}}
+\def\xmltext #1#2{\ctxlua{lxml.text("#1","#2")}}
+\def\xmlcontent #1#2{\ctxlua{lxml.content("#1","#2")}}
+\def\xmlstripped #1#2{\ctxlua{lxml.stripped("#1","#2")}}
+\def\xmlstrip #1#2{\ctxlua{lxml.strip("#1","#2")}}
+\def\xmlflush #1{\ctxlua{lxml.flush("#1")}}
+\def\xmlindex #1#2#3{\ctxlua{lxml.index("#1","#2",\number#3)}}
+\def\xmlidx #1#2#3{\ctxlua{lxml.idx("#1","#2",\number#3)}}
+\def\xmlcount #1#2{\ctxlua{lxml.count("#1","#2")}}
+\def\xmlname #1{\ctxlua{lxml.name("#1")}}
+\def\xmlnamespace #1{\ctxlua{lxml.namespace("#1")}}
+\def\xmltag #1{\ctxlua{lxml.tag("#1")}}
+\def\xmlattribute #1#2#3{\ctxlua{lxml.attribute("#1","#2","#3")}}
+\def\xmlattributedef#1#2#3#4{\ctxlua{lxml.attribute("#1","#2","#3","#4")}}
+\def\xmlatt #1#2{\ctxlua{lxml.attribute("#1","/","#2")}}
+\def\xmlattdef #1#2#3{\ctxlua{lxml.attribute("#1","/","#2","#3")}}
+\def\xmlconcat #1#2#3{\ctxlua{lxml.concat("#1","#2",[[\detokenize{#3}]])}}
+\def\xmlsetsetup #1#2#3{\ctxlua{lxml.setsetup("#1","#2","#3")}}
+\def\xmlsetfunction #1#2#3{\ctxlua{lxml.setaction("#1","#2",#3)}}
+\def\xmlloaddirectives #1{\ctxlua{lxml.directives.load("#1")}}
+\def\xmldirectives #1{\ctxlua{lxml.directives.setups("#1")}}
+\def\xmlregisterns #1#2{\ctxlua{xml.registerns("#1","#2")}} % document
\def\xmlchecknamespace#1#2#3{\ctxlua{xml.check_namespace(lxml.id("#1"),"#2","#3")}} % element
\def\xmlremapname #1#2#3#4{\ctxlua{xml.remapname(lxml.id("#1"),"#2","#3","#4")}} % element
+\def\xmlremapnamespace#1#2#3{\ctxlua{xml.rename_space(lxml.id("#1"),"#2","#3")}} % document
+\def\xmldelete #1#2{\ctxlua{xml.delete(lxml.id("#1"),"#2")}}
+\def\xmlinclude #1#2#3{\ctxlua{lxml.include("#1","#2","#3",true)}}
+\def\xmldoifelse #1#2{\ctxlua{cs.testcase(xml.found(lxml.id("#1"),"#2",false))}}
+\def\xmldoifelsetext #1#2{\ctxlua{cs.testcase(xml.found(lxml.id("#1"),"#2",true ))}}
+
+% \startxmlsetups xml:include
+% \xmlinclude{main}{include}{filename|href}
+% \stopxmlsetups
+%
+% \xmlprependsetup{xml:include}
\let\xmlgrab\xmlsetsetup
@@ -58,8 +69,10 @@
\newtoks \registeredxmlsetups
-\def\xmlregistersetup#1%
- {\appendtoksonce\directsetup{#1}\to\registeredxmlsetups}
+\def\xmlappendsetup #1{\appendtoksonce \directsetup{#1}\to\registeredxmlsetups} % to be done, made faster
+\def\xmlprependsetup#1{\prependtoksonce\directsetup{#1}\to\registeredxmlsetups} % to be done, made faster
+
+\let\xmlregistersetup\xmlappendsetup
\def\xmlregisteredsetups
{\the\registeredxmlsetups}
@@ -103,7 +116,7 @@
\def\xmlcdataobeyedline {\obeyedline}
\def\xmlcdataobeyedspace{\strut\obeyedspace}
\def\xmlcdatabefore {\bgroup\tt}
-\def\xmlcdataafter {\egroup x}
+\def\xmlcdataafter {\egroup}
% verbatim (todo: pre/post whitespace, maybe split verbatim and
% cdata commands), experimental:
@@ -116,20 +129,26 @@
% \def\startxmlinlineverbatim [#1]{}
% \def\stopxmlinlineverbatim {}
+% we use an xml: namespace so one has to define a suitable verbatim, say
+%
+% \definetyping[xml:verbatim][typing]
+%
+% this is experimental!
+
\def\startxmldisplayverbatim[#1]%
- {\begingroup
+ {\startpacked % \begingroup
\let\currenttypingclass\??tp
- \edef\currenttyping{#1}%
+ \edef\currenttyping{xml:#1}%
\def\stopxmldisplayverbatim
{\endofverbatimlines
- \endgroup}%
+ \stoppacked} % \endgroup
\mkinitializeverbatim
\beginofverbatimlines}
\def\startxmlinlineverbatim[#1]%
{\begingroup
\let\currenttypingclass\??ty
- \edef\currenttyping{#1}%
+ \edef\currenttyping{xml:#1}%
\let\stopxmldisplayverbatim\endgroup
\mkinitializeverbatim}
diff --git a/tex/context/base/math-ext.tex b/tex/context/base/math-ext.tex
index a5dcabd20..cf332ba00 100644
--- a/tex/context/base/math-ext.tex
+++ b/tex/context/base/math-ext.tex
@@ -10,7 +10,6 @@
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%M \input math-ext
\unprotect
@@ -252,10 +251,11 @@
%D \macros{mtharrowfill,defaultmtharrowfill}
%D
%D To extend the arrows we need to define a \quotation{math arrow
-%D fill}. This command takes 7 arguments: the first four correspond
+%D fill}. This command takes 8 arguments: the first four correspond to
%D the second argument of \tex{definematharrow} explained above. The
-%D other three specify the tail, body and head of the arrow.
-%D \tex{defaultmtharrowfill} has values tweaked to match latin modern
+%D other three specify the tail, body and head of the arrow. The last
+%D argument specifies the math-mode in which the arrow is drawn.
+%D \tex{defaultmtharrowfill} has values tweaked to match Latin Modern
%D fonts. For fonts that are significantly different (e.g. cows) a
%D different set of values needs to be determined.
@@ -270,11 +270,21 @@
\def\defaultmtharrowfill{\mtharrowfill 7227}
%D We now define some arrow fills that will be used for defining the
-%D arrows. \tex{leftarrowfill} and \tex{rightarrowfill} are redefined
-%D using \tex{defaultmtharrowfill}.
+%D arrows. Plain \TEX\ already defines \tex{leftarrowfill} and
+%D \tex{rightarrowfill}. The \tex{defaultmtharrowfill} command defines an
+%D arrowfill that takes an argument (so that it can also be used
+%D with over and under arrows). However, the Plain \TEX\ definitions of
+%D \tex{leftarrowfill} and \tex{rightarrowfill} do not take this extra
+%D argument. To be backward compatible with Plain \TEX, we define two
+%D arrowfills: \tex{specrightarrowfill} which takes an extra argument, and
+%D \tex{rightarrowfill} which does not.
+
+\def\specrightarrowfill {\defaultmtharrowfill \relbar \relbar \rightarrow}
+\def\specleftarrowfill {\defaultmtharrowfill \leftarrow \relbar \relbar}
+
+\def\rightarrowfill {\specrightarrowfill \textstyle}
+\def\leftarrowfill {\specleftarrowfill \textstyle}
-\def\rightarrowfill {\defaultmtharrowfill \relbar \relbar \rightarrow}
-\def\leftarrowfill {\defaultmtharrowfill \leftarrow \relbar \relbar}
\def\equalfill {\defaultmtharrowfill \Relbar \Relbar \Relbar}
\def\Rightarrowfill {\defaultmtharrowfill \Relbar \Relbar \Rightarrow}
\def\Leftarrowfill {\defaultmtharrowfill \Leftarrow \Relbar \Relbar}
@@ -303,8 +313,8 @@
%D \filename{extpfel.sty} and \filename{mathtools.sty} packages for
%D \LATEX\ (plus a few more).
-\definematharrow [xrightarrow] [0359] [\rightarrowfill]
-\definematharrow [xleftarrow] [3095] [\leftarrowfill]
+\definematharrow [xrightarrow] [0359] [\specrightarrowfill]
+\definematharrow [xleftarrow] [3095] [\specleftarrowfill]
\definematharrow [xequal] [0099] [\equalfill]
\definematharrow [xRightarrow] [0359] [\Rightarrowfill]
\definematharrow [xLeftarrow] [3095] [\Leftarrowfill]
@@ -321,7 +331,7 @@
\definematharrow [xhookrightarrow] [0395] [\hookrightfill]
\definematharrow [xrel] [0099] [\relfill]
\definematharrow [xtriplerel] [0099] [\triplerelfill]
-\definematharrow [xrightoverleftarrow] [0359,3095] [\rightarrowfill,\leftarrowfill]
+\definematharrow [xrightoverleftarrow] [0359,3095] [\specrightarrowfill,\specleftarrowfill]
\definematharrow [xleftrightharpoons] [3399,3399] [\leftharpoonupfill,\rightharpoondownfill]
\definematharrow [xrightleftharpoons] [3399,3399] [\rightharpoonupfill,\leftharpoondownfill]
@@ -394,8 +404,8 @@
%D Now we define the arrows
-\definemathoverarrow [overleftarrow] [\leftarrowfill]
-\definemathoverarrow [overrightarrow] [\rightarrowfill]
+\definemathoverarrow [overleftarrow] [\specleftarrowfill]
+\definemathoverarrow [overrightarrow] [\specrightarrowfill]
\definemathoverarrow [overleftrightarrow] [\leftrightarrowfill]
\definemathoverarrow [overtwoheadrightarrow] [\twoheadrightarrowfill]
\definemathoverarrow [overtwoheadleftarrow] [\twoheadleftarrowfill]
@@ -404,8 +414,8 @@
\definemathoverarrow [overleftharpoondown] [1pt] [\leftharpoondownfill]
\definemathoverarrow [overleftharpoonup] [\leftharpoonupfill]
-\definemathunderarrow [underleftarrow] [\leftarrowfill]
-\definemathunderarrow [underrightarrow] [\rightarrowfill]
+\definemathunderarrow [underleftarrow] [\specleftarrowfill]
+\definemathunderarrow [underrightarrow] [\specrightarrowfill]
\definemathunderarrow [underleftrightarrow] [\leftrightarrowfill]
\definemathunderarrow [undertwoheadrightarrow][\twoheadrightarrowfill]
\definemathunderarrow [undertwoheadleftarrow] [\twoheadleftarrowfill]
diff --git a/tex/context/base/meta-pdf.lua b/tex/context/base/meta-pdf.lua
index 5d9c43231..fc9ac004e 100644
--- a/tex/context/base/meta-pdf.lua
+++ b/tex/context/base/meta-pdf.lua
@@ -8,6 +8,8 @@
-- version 2 gsubbed the file into TeX code, and version 3 uses
-- the new lpeg functionality and streams the result into TeX.
+-- We will move old stuff to edu.
+
--~ old lpeg 0.4 lpeg 0.5
--~ 100 times test graphic 2.45 (T:1.07) 0.72 (T:0.24) 0.580 (0.560 no table) -- 0.54 optimized for one space (T:0.19)
--~ 100 times big graphic 10.44 4.30/3.35 nogb 2.914 (2.050 no table) -- 1.99 optimized for one space (T:0.85)
@@ -46,7 +48,7 @@ function mptopdf.parse()
mptopdf.parsers[mptopdf.parser]()
end
--- shared code
+-- old code
mptopdf.steps = { }
@@ -63,8 +65,6 @@ function mptopdf.descape(str)
return str:gsub("\\([%(%)\\])",mptopdf.descapes)
end
--- old code
-
function mptopdf.steps.descape(str)
str = str:gsub("\\(%d%d%d)",function(n)
return "\\\\char" .. tonumber(n,8) .. " "
@@ -217,10 +217,10 @@ end
function mptopdf.convertmpstopdf(name)
if mptopdf.loaded(name) then
garbagecollector.push()
- input.start_timing(mptopdf)
+ input.starttiming(mptopdf)
mptopdf.parse()
mptopdf.reset()
- input.stop_timing(mptopdf)
+ input.stoptiming(mptopdf)
garbagecollector.pop()
else
tex.print("file " .. name .. " not found")
@@ -342,18 +342,6 @@ function mp.textext(font, scale, str) -- old parser
mptopdf.resetpath()
end
-function mp.fshow(str,font,scale) -- lpeg parser
- mp.textext(font,scale,mptopdf.descape(str))
---~ local dx, dy = 0, 0
---~ if #mptopdf.stack.path > 0 then
---~ dx, dy = mptopdf.stack.path[1][1], mptopdf.stack.path[1][2]
---~ end
---~ mptopdf.flushconcat()
---~ mptopdf.texcode("\\MPStextext{"..font.."}{"..scale.."}{"..mptopdf.descape(str).."}{"..dx.."}{"..dy.."}")
---~ mptopdf.resetpath()
-end
-
-
--~ function mp.handletext(font,scale.str,dx,dy)
--~ local one, two = string.match(str, "^(%d+)::::(%d+)")
--~ if one and two then
@@ -473,6 +461,24 @@ end
-- that MetaPost produces. It's my first real lpeg code, which may
-- show. Because the parser binds to functions, we define it last.
+do -- assumes \let\c\char
+
+ local byte = string.byte
+ local digit = lpeg.R("09")
+ local spec = digit^2 * lpeg.P("::::") * digit^2
+ local text = lpeg.Cc("{") * (
+ lpeg.P("\\") * ( (digit * digit * digit) / function(n) return "c" .. tonumber(n,8) end) +
+ lpeg.P(" ") / function(n) return "\\c32" end + -- never in new mp
+ lpeg.P(1) / function(n) return "\\c" .. byte(n) end
+ ) * lpeg.Cc("}")
+ local package = lpeg.Cs(spec + text^0)
+
+ function mp.fshow(str,font,scale) -- lpeg parser
+ mp.textext(font,scale,package:match(str))
+ end
+
+end
+
do
local eol = lpeg.S('\r\n')^1
@@ -517,8 +523,10 @@ do
local concat = (lpeg.P("[") * (cnumber * sp^0)^6 * lpeg.P("]") * sp * lpeg.P("concat") ) / mp.concat
local scale = ( (cnumber * sp^0)^6 * sp * lpeg.P("concat") ) / mp.concat
- local fshow = (lpeg.P("(") * lpeg.C((1-lpeg.P(")"))^1) * lpeg.P(")") * space * lpeg.C(lpeg.P((1-space)^1)) * space * cnumber * space * lpeg.P("fshow")) / mp.fshow
local fshow = (lpeg.P("(") * lpeg.C((1-lpeg.P(")"))^1) * lpeg.P(")") * space * cstring * space * cnumber * space * lpeg.P("fshow")) / mp.fshow
+ local fshow = (lpeg.P("(") *
+ lpeg.Cs( ( lpeg.P("\\(")/"\\050" + lpeg.P("\\)")/"\\051" + (1-lpeg.P(")")) )^1 )
+ * lpeg.P(")") * space * cstring * space * cnumber * space * lpeg.P("fshow")) / mp.fshow
local setlinewidth_x = (lpeg.P("0") * sp * cnumber * sp * lpeg.P("dtransform truncate idtransform setlinewidth pop")) / mp.setlinewidth
local setlinewidth_y = (cnumber * sp * lpeg.P("0 dtransform exch truncate exch idtransform pop setlinewidth") ) / mp.setlinewidth
@@ -560,7 +568,6 @@ do
local attribute = ((cnumber * sp)^2 * lpeg.P("attribute")) / mp.attribute
local A = ((cnumber * sp)^2 * lpeg.P("A")) / mp.attribute
-
local preamble = (
prolog + setup +
boundingbox + highresboundingbox + specials + special +
@@ -570,7 +577,7 @@ do
local procset = (
lj + ml + lc +
c + l + m + n + p + r +
-A +
+ A +
R + C + G +
S + F + B + W +
vlw + hlw +
@@ -584,7 +591,7 @@ A +
local verbose = (
curveto + lineto + moveto + newpath + closepath + rlineto +
setrgbcolor + setcmykcolor + setgray +
-attribute +
+ attribute +
setlinejoin + setmiterlimit + setlinecap +
stroke + fill + clip + both +
setlinewidth_x + setlinewidth_y +
diff --git a/tex/context/base/meta-pdf.mkii b/tex/context/base/meta-pdf.mkii
index 27774b34d..97aedf97e 100644
--- a/tex/context/base/meta-pdf.mkii
+++ b/tex/context/base/meta-pdf.mkii
@@ -639,8 +639,15 @@
%D finally I saw the light. It proved that we also had to
%D take care of \type{(split arguments)}.
+% \startMPcode
+% draw btex Ga toch effe f\kern0ptietsen?{}` etex ;
+% \stopMPcode
+
+\newtoks \everyMPshowfont
+
\def\setMPfshowfont#1#2%
- {\font\temp=#1\space at #2\relax\temp}
+ {\font\temp=#1\space at #2\relax\temp
+ \the\everyMPshowfont}
\let\MPfshowcommand\empty
@@ -648,7 +655,7 @@
{\setbox\scratchbox\hbox
{\obeyMPspecials
\edef\MPtextsize{\gMPa\nofMParguments}%
- \def\do(##1){##1}%
+ \def\do(##1){##1}% only works in latest mp
\edef\MPtextdata{\dogMPa1}% beware, stack can have more
\handleMPtext}%
\setbox\scratchbox\hbox
@@ -672,6 +679,29 @@
% elsewhere we will implement \handleMPtextmptxt
+\def\doflushMPtext#1%
+ {\edef\!!stringa{#1}%
+ \@EA\dodoflushMPtext\!!stringa\relax}
+
+\def\dodoflushMPtext
+ {\afterassignment\dododoflushMPtext\let\nexttoken=}
+
+\def\dododoflushMPtext
+ {\ifx\nexttoken\relax
+ % done
+ \else\ifx\nexttoken\char
+ \@EA\@EA\@EA\dodododoflushMPtext
+ \else
+ {\nexttoken}%
+ \@EA\@EA\@EA\dodoflushMPtext
+ \fi\fi}
+
+\def\dodododoflushMPtext
+ {\afterassignment\dododododoflushMPtext\scratchcounter}
+
+\def\dododododoflushMPtext
+ {{\char\scratchcounter}\let\next\dodoflushMPtext}
+
\def\handleMPtextnormal
{\let\ \relax % mp breaks long lines and appends a \
\ifx\MPtextsize\PSnfont % round font size (to pt)
@@ -694,8 +724,8 @@
\MPfshowcommand
{\ifnum\nofMParguments=\plusone
\def\do(##1){##1}%
- \dogMPa1%
- \else
+ \doflushMPtext{\dogMPa1}%
+ \else % can't happen anymore in mp version 1+
% we need to catch ( a ) (a a a) (\123 \123 \123) etc
\scratchcounter\plusone
\def\dodo##1% Andreas Fieger's bug: (\304...)
@@ -862,7 +892,7 @@
\or
\ifx\lastMPmoveX\empty \else % we assume 0,0 rlineto
\flushMPmoveto
- \PDFcode{\!MP\lastMPmoveX\space \!MP\lastMPmoveY\space l S}%
+ \PDFcode{\!MP\lastMPmoveX\space \!MP\lastMPmoveY\space l}%
\resetMPmoveto
\fi
\or
diff --git a/tex/context/base/meta-pdf.mkiv b/tex/context/base/meta-pdf.mkiv
index d10734547..e8ce94146 100644
--- a/tex/context/base/meta-pdf.mkiv
+++ b/tex/context/base/meta-pdf.mkiv
@@ -28,7 +28,7 @@
%D Plugin.
-\def\mkconvertMPtoPDF
+\def\mkconvertMPtoPDF % watch the transparency reset
{\vbox\bgroup
\forgetall
\offinterlineskip
@@ -40,6 +40,7 @@
\PDFcomment{mps begin}%
\PDFcode{q 1 0 0 1 0 0 cm}%
\ctxlua{mptopdf.convertmpstopdf("\MPfilename")}\removeunwantedspaces
+ \dohandleMPresettransparency % a bit messy here, should be a toks
\PDFcode{Q}%
\PDFcomment{mps end}%
\stopMPresources
@@ -86,6 +87,7 @@
\executeifdefined{handleMPtext\MPtexttag}
{\setbox\scratchbox\hbox
{\font\temp=#1\space at #2\onebasepoint
+ \let\c\char
\temp
\MPfshowcommand{#3}}%
\setbox\scratchbox\hbox
@@ -113,11 +115,11 @@
%D The boundingbox.
\def\MPSboundingbox#1#2#3#4%
- {\xdef\MPllx{#1}
- \xdef\MPlly{#2}
- \xdef\MPurx{#3}
- \xdef\MPury{#4}
- \xdef\MPwidth {\the\dimexpr#3\onebasepoint-#1\onebasepoint\relax}
+ {\xdef\MPllx{#1}%
+ \xdef\MPlly{#2}%
+ \xdef\MPurx{#3}%
+ \xdef\MPury{#4}%
+ \xdef\MPwidth {\the\dimexpr#3\onebasepoint-#1\onebasepoint\relax}%
\xdef\MPheight{\the\dimexpr#4\onebasepoint-#2\onebasepoint\relax}}
\MPSboundingbox0000
diff --git a/tex/context/base/meta-pdf.tex b/tex/context/base/meta-pdf.tex
index 4dec40e70..d5b7f202f 100644
--- a/tex/context/base/meta-pdf.tex
+++ b/tex/context/base/meta-pdf.tex
@@ -13,6 +13,8 @@
%D Formerly known as supp-pdf.tex and supp-mpe.tex.
+%D We will clean up the color mess later.
+
%D These macros are written as generic as possible. Some
%D general support macro's are loaded from a small module
%D especially made for non \CONTEXT\ use. In this module I
@@ -968,19 +970,27 @@
\ifx\colorversion\undefined \else \ifnum\colorversion>\plusone
- \def\dohandleMPgraycolor #1{\ctxlua{ctx.pdfgrayliteral(\the\currentcolormodel,#1)}}
- \def\dohandleMPrgbcolor #1#2#3{\ctxlua{ctx.pdfrgbliteral (\the\currentcolormodel,#1,#2,#3)}}
- \def\dohandleMPcmykcolor#1#2#3#4{\ctxlua{ctx.pdfcmykliteral(\the\currentcolormodel,#1,#2,#3,#4)}}
- \def\dohandleMPspotcolor#1#2#3#4{\ctxlua{ctx.pdfspotliteral(\the\currentcolormodel,"#1",#2,"#3","#4")}}
-
- \def\dohandleMPgraytransparency #1#2#3{\ctxlua{ctx.pdfgrayliteral(\the\currentcolormodel,#1)}%
- \ctxlua{ctx.pdftransparencyliteral(#2,#3)}}
- \def\dohandleMPrgbtransparency #1#2#3#4#5{\ctxlua{ctx.pdfrgbliteral (\the\currentcolormodel,#1,#2,#3)}%
- \ctxlua{ctx.pdftransparencyliteral(#4,#5)}}
- \def\dohandleMPcmyktransparency#1#2#3#4#5#6{\ctxlua{ctx.pdfcmykliteral(\the\currentcolormodel,#1,#2,#3,#4)}%
- \ctxlua{ctx.pdftransparencyliteral(#5,#6)}}
- \def\dohandleMPspottransparency#1#2#3#4#5#6{\ctxlua{ctx.pdfspotliteral(\the\currentcolormodel,"#1",#2,"#3","#4")}%
- \ctxlua{ctx.pdftransparencyliteral(#5,#6)}}
+ \def\dohandleMPgraycolor #1{\ctxlua{ctx.pdffinishtransparency()
+ ctx.pdfgrayliteral(\the\currentcolormodel,#1)}}
+ \def\dohandleMPrgbcolor #1#2#3{\ctxlua{ctx.pdffinishtransparency()
+ ctx.pdfrgbliteral (\the\currentcolormodel,#1,#2,#3)}}
+ \def\dohandleMPcmykcolor#1#2#3#4{\ctxlua{ctx.pdffinishtransparency()
+ ctx.pdfcmykliteral(\the\currentcolormodel,#1,#2,#3,#4)}}
+ \def\dohandleMPspotcolor#1#2#3#4{\ctxlua{ctx.pdffinishtransparency()
+ ctx.pdfspotliteral(\the\currentcolormodel,"#1",#2,"#3","#4")}}
+
+ % we can combine the next calls
+
+ \def\dohandleMPgraytransparency #1#2#3{\ctxlua{ctx.pdfgrayliteral(\the\currentcolormodel,#1)
+ ctx.pdftransparencyliteral(#2,#3)}}
+ \def\dohandleMPrgbtransparency #1#2#3#4#5{\ctxlua{ctx.pdfrgbliteral (\the\currentcolormodel,#1,#2,#3)
+ ctx.pdftransparencyliteral(#4,#5)}}
+ \def\dohandleMPcmyktransparency#1#2#3#4#5#6{\ctxlua{ctx.pdfcmykliteral(\the\currentcolormodel,#1,#2,#3,#4)
+ ctx.pdftransparencyliteral(#5,#6)}}
+ \def\dohandleMPspottransparency#1#2#3#4#5#6{\ctxlua{ctx.pdfspotliteral(\the\currentcolormodel,"#1",#2,"#3","#4")
+ ctx.pdftransparencyliteral(#5,#6)}}
+
+ \def\dohandleMPresettransparency {\ctxlua{ctx.pdffinishtransparency()}}
\def\resolveMPgraycolor #1\to#2{\ctxlua{ctx.resolvempgraycolor("\strippedcsname#2","MPresolvedspace",\number\currentcolormodel,#1)}}
\def\resolveMPrgbcolor #1#2#3\to#4{\ctxlua{ctx.resolvemprgbcolor ("\strippedcsname#4","MPresolvedspace",\number\currentcolormodel,#1,#2,#3)}}
diff --git a/tex/context/base/mult-con.tex b/tex/context/base/mult-con.tex
index 22103e615..7a37869a9 100644
--- a/tex/context/base/mult-con.tex
+++ b/tex/context/base/mult-con.tex
@@ -502,6 +502,10 @@ subsubsubsubsubsubject: subsubsubsubsubonderwerp subsubsubsubsub
zitat citace
citazione citat
citation
+ blockquote: blokcitaat blockquote
+ blockquote blockquote
+ blockquote blockquote
+ blockquote
quote: citeer quote
zitieren citovat
menzione minicitat
@@ -1801,6 +1805,10 @@ numberwidth: nummerbreedte numberwidth
ausrichtung zarovnani
allinea aliniere
alignement
+ symalign: symuitlijnen symalign
+ symausrichtung symzarovnani
+ symallinea symaliniere
+ symalignement
urlspace: urlspatie urlspace
urlspatium prostorurl
spaziourl spatiuurl
diff --git a/tex/context/base/mult-sys.tex b/tex/context/base/mult-sys.tex
index f4f8aa5a7..69381aab3 100644
--- a/tex/context/base/mult-sys.tex
+++ b/tex/context/base/mult-sys.tex
@@ -574,6 +574,7 @@
\definesystemvariable {vn} % VoetNoten
\definesystemvariable {vt} % VerTical
\definesystemvariable {wr} % WitRuimte
+\definesystemvariable {wl} % WordList
\definesystemvariable {xf} % XML File
\definesystemvariable {xp} % XML Processing
\definesystemvariable {xy} % schaal
diff --git a/tex/context/base/node-ini.lua b/tex/context/base/node-ini.lua
index af30b3940..d2cfc17e0 100644
--- a/tex/context/base/node-ini.lua
+++ b/tex/context/base/node-ini.lua
@@ -8,14 +8,63 @@ if not modules then modules = { } end modules ['node-ini'] = {
--[[ldx--
<p>Access to nodes is what gives <l n='luatex'/> its power. Here we
-implement a few helper functions.</p>
+implement a few helper functions. These functions are rather optimized.</p>
--ldx]]--
-nodes = nodes or { }
-nodes.trace = false
+nodes = nodes or { }
+nodes.trace = false
+nodes.ignore = nodes.ignore or false
-- handy helpers
+if node.protect_glyphs then
+
+ nodes.protect_glyphs = node.protect_glyphs
+ nodes.unprotect_glyphs = node.unprotect_glyphs
+
+else do
+
+ -- initial value subtype : X000 0001 = 1 = 0x01 = char
+ --
+ -- expected before linebreak : X000 0000 = 0 = 0x00 = glyph
+ -- X000 0010 = 2 = 0x02 = ligature
+ -- X000 0100 = 4 = 0x04 = ghost
+ -- X000 1010 = 10 = 0x0A = leftboundary lig
+ -- X001 0010 = 18 = 0x12 = rightboundary lig
+ -- X001 1010 = 26 = 0x1A = both boundaries lig
+ -- X000 1100 = 12 = 0x0C = leftghost
+ -- X001 0100 = 20 = 0x14 = rightghost
+
+
+ local glyph = node.id('glyph')
+ local traverse_id = node.traverse_id
+
+ function nodes.protect_glyphs(head)
+ local done = false
+ for g in traverse_id(glyph,head) do
+ local s = g.subtype
+ if s == 1 then
+ done, g.subtype = true, 256
+ elseif s <= 256 then
+ done, g.subtype = true, 256 + s
+ end
+ end
+ return done
+ end
+
+ function nodes.unprotect_glyphs(head)
+ local done = false
+ for g in traverse_id(glyph,head) do
+ local s = g.subtype
+ if s > 256 then
+ done, g.subtype = true, s - 256
+ end
+ end
+ return done
+ end
+
+end end
+
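+-- Sketch of the intended use of the fallback above (illustration only): the
+-- subtypes of the glyphs in a list are shifted by 256 so that the characters
+-- are seen as already processed glyphs, and shifted back when we want to
+-- treat them as characters again:
+--
+--   nodes.protect_glyphs(head)   -- e.g. subtype 1 (char) becomes 256
+--   nodes.unprotect_glyphs(head) -- undoes the shift (s > 256 -> s - 256)
+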
do
local remove, free = node.remove, node.free
@@ -76,44 +125,18 @@ function nodes.report(t,done)
end
end
---~ function nodes.count(stack)
---~ if stack then
---~ local n = 0
---~ for _, node in pairs(stack) do
---~ if node then
---~ local kind = node[1]
---~ if kind == 'hlist' or kind == 'vlist' then
---~ local content = node[8]
---~ if type(content) == "table" then
---~ n = n + 1 + nodes.count(content) -- self counts too
---~ else
---~ n = n + 1
---~ end
---~ elseif kind == 'inline' then
---~ n = n + nodes.count(node[4]) -- self does not count
---~ else
---~ n = n + 1
---~ end
---~ end
---~ end
---~ return n
---~ else
---~ return 0
---~ end
---~ end
-
do
local hlist, vlist = node.id('hlist'), node.id('vlist')
- function nodes.count(stack)
+ local function count(stack,flat)
local n = 0
while stack do
local id = stack.id
- if id == hlist or id == vlist then
+ if not flat and (id == hlist or id == vlist) then
local list = stack.list
if list then
- n = n + 1 + nodes.count(list) -- self counts too
+ n = n + 1 + count(list) -- self counts too
else
n = n + 1
end
@@ -125,6 +148,8 @@ do
return n
end
+ nodes.count = count
+
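+ -- nodes.count(head) also descends into nested (h/v)lists, while the tracer
+ -- further down uses nodes.count(head,true) to count the current level only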
end
--[[ldx--
@@ -147,95 +172,14 @@ original table is used.</p>
<p>Insertion is handled (at least in <l n='context'/> as follows. When
we need to insert a node at a certain position, we change the node at
that position by a dummy node, tagged <type>inline</type> which itself
-contains the original node and one or more new nodes. Before we pass
+contains the original node and one or more new nodes. Before we pass
back the list we collapse the list. Of course collapsing could be built
into the <l n='tex'/> engine, but this is a not so natural extension.</p>
<p>When we collapse (something that we only do when really needed), we
-also ignore the empty nodes.</p>
+also ignore the empty nodes. [This is obsolete!]</p>
--ldx]]--
---~ function nodes.inline(...)
---~ return { 'inline', 0, nil, { ... } }
---~ end
-
---~ do
-
---~ function collapse(stack,existing_t)
---~ if stack then
---~ local t = existing_t or { }
---~ for _, node in pairs(stack) do
---~ if node then
---~ -- if node[3] then node[3][1] = nil end -- remove status bit
---~ local kind = node[1]
---~ if kind == 'inline' then
---~ collapse(node[4],t)
---~ elseif kind == 'hlist' or kind == 'vlist' then
---~ local content = node[8]
---~ if type(content) == "table" then
---~ node[8] = collapse(content)
---~ end
---~ t[#t+1] = node
---~ else
---~ t[#t+1] = node
---~ end
---~ else
---~ -- deleted node
---~ end
---~ end
---~ return t
---~ else
---~ return stack
---~ end
---~ end
-
---~ nodes.collapse = collapse
-
---~ end
-
---[[ldx--
-<p>The following function implements a generic node processor. A
-generic processer is not that much needed, because we often need
-to act differently for horizontal or vertical lists. For instance
-counting nodes needs a different method (ok, we could add a second
-handle for catching them but it would become messy then).</p>
---ldx]]--
-
---~ function nodes.each(stack,handle)
---~ if stack then
---~ local i = 1
---~ while true do
---~ local node = stack[i]
---~ if node then
---~ local kind = node[1]
---~ if kind == 'hlist' or kind == 'vlist' then
---~ local content = node[8]
---~ if type(content) == "table" then
---~ nodes.each(content,handle)
---~ end
---~ elseif kind == 'inline' then
---~ nodes.each(node[4],handle)
---~ else
---~ stack[i] = handle(kind,node)
---~ end
---~ end
---~ i = i + 1
---~ if i > #stack then
---~ break
---~ end
---~ end
---~ end
---~ end
-
---~ function nodes.remove(stack,id,subid) -- "whatsit", 6
---~ nodes.each(stack, function(kind,node)
---~ if kind == id and node[2] == subid then
---~ return false
---~ else
---~ return node
---~ end
---~ end)
---~ end
--[[ldx--
<p>Serializing nodes can be handy for tracing. Also, saving and
@@ -298,64 +242,100 @@ if not fonts.tfm.id then fonts.tfm.id = { } end
do
- local glyph, hlist, vlist = node.id('glyph'), node.id('hlist'), node.id('vlist')
- local pushmarks = false
+ local glyph = node.id('glyph')
+ local has_attribute = node.has_attribute
+ local traverse_id = node.traverse_id
- function nodes.process_glyphs(head)
+ local pairs = pairs
+
+ local starttiming, stoptiming = input.starttiming, input.stoptiming
+
+ function nodes.process_characters(head)
if status.output_active then -- not ok, we need a generic blocker, pagebody ! / attr tex.attributes
- -- 25% calls
- return true
- elseif not head then
- -- 25% calls
- return true
- elseif not head.next and (head.id == hlist or head.id == vlist) then
- return head
+ return head, false -- true
else
-- either next or not, but definitely no already processed list
- input.start_timing(nodes)
- local usedfonts, found, fontdata, done = { }, false, fonts.tfm.id, false
- for n in node.traverse_id(glyph,head) do
- local font = n.font
- if not usedfonts[font] then
- local shared = fontdata[font].shared
- if shared and shared.processors then
- usedfonts[font], found = shared.processors, true
+ starttiming(nodes)
+ local usedfonts, attrfonts, done = { }, { }, false
+ -- todo: should be independent of otf
+ local set_dynamics, font_ids = fonts.otf.set_dynamics, fonts.tfm.id -- todo: font-var.lua so that we can global this one
+ local a, u, prevfont, prevattr = 0, 0, nil, 0
+ for n in traverse_id(glyph,head) do
+ local font, attr = n.font, has_attribute(n,0) -- zero attribute is reserved for fonts, preset to 0 is faster (first match)
+ if attr and attr > 0 then
+ if font ~= prevfont or attr ~= prevattr then
+ local used = attrfonts[font]
+ if not used then
+ used = { }
+ attrfonts[font] = used
+ end
+ if not used[attr] then
+ local d = set_dynamics(font_ids[font],attr) -- todo, script, language -> n.language also axis
+ if d then
+ used[attr] = d
+ a = a + 1
+ end
+ end
+ prevfont, prevattr = font, attr
+ end
+ elseif font ~= prevfont then
+ prevfont, prevattr = font, 0
+ local used = usedfonts[font]
+ if not used then
+ local data = font_ids[font]
+ if data then
+ local shared = data.shared -- we need to check shared, only when same features
+ if shared then
+ local processors = shared.processors
+ if processors and #processors > 0 then
+ usedfonts[font] = processors
+ u = u + 1
+ end
+ end
+ else
+ -- probably nullfont
+ end
end
- end
- end
- if found then
- local tail = head
- if head.next then
- tail = node.slide(head)
else
- head.prev = nil
+ prevattr = attr
end
+ end
+ -- we could combine these and just make the attribute nil
+ if u > 0 then
for font, processors in pairs(usedfonts) do
- if pushmarks then
- local h, d = fonts.pushmarks(head,font)
- head, done = head or h, done or d
- end
- for _, processor in ipairs(processors) do
- local h, d = processor(head,font)
- head, done = head or h, done or d
+ local n = #processors
+ if n == 1 then
+ local h, d = processors[1](head,font,false)
+ head, done = h or head, done or d
+ else
+ for i=1,#processors do
+ local h, d = processors[i](head,font,false)
+ head, done = h or head, done or d
+ end
end
- if pushmarks then
- local h, d = fonts.popmarks(head,font)
- head, done = head or h, done or d
+ end
+ end
+ if a > 0 then -- we need to get rid of a loop here
+ for font, dynamics in pairs(attrfonts) do
+ for attribute, processors in pairs(dynamics) do -- attr can switch in between
+ local n = #processors
+ if n == 1 then
+ local h, d = processors[1](head,font,attribute)
+ head, done = h or head, done or d
+ else
+ for i=1,n do
+ local h, d = processors[i](head,font,attribute)
+ head, done = h or head, done or d
+ end
+ end
end
end
end
- input.stop_timing(nodes)
+ stoptiming(nodes)
if nodes.trace then
nodes.report(head,done)
end
- if done then
- return head -- something changed
- elseif head then
- return true -- nothing changed
- else
- return false -- delete list
- end
+ return head, true
end
end
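+
+-- A do-nothing font processor, only there to illustrate the calling
+-- convention used above (hypothetical, not part of this patch): a processor
+-- gets the head of the list, the font id and false (or a dynamics attribute)
+-- and returns the (possibly new) head plus a done flag:
+--
+--   local function demo_processor(head,font,attr)
+--       return head, false
+--   end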
@@ -366,9 +346,9 @@ end
do
- local contains, set, attribute = node.has_attribute, node.set_attribute, tex.attribute
+ local has_attribute, set, attribute = node.has_attribute, node.set_attribute, tex.attribute
- function nodes.inherit_attributes(n)
+ function nodes.inherit_attributes(n) -- still ok ?
if n then
local i = 1
while true do
@@ -376,7 +356,7 @@ do
if a < 0 then
break
else
- local ai = contains(n,i)
+ local ai = has_attribute(n,i)
if not ai then
set(n,i,a)
end
@@ -400,54 +380,132 @@ function nodes.length(head)
end
end
-nodes.processors.actions = nodes.processors.actions or { }
+--~ nodes.processors.actions = nodes.processors.actions or { }
-function nodes.processors.action(head)
- if head then
- node.slide(head)
- local actions, done = nodes.processors.actions, false
- for i=1,#actions do
- local action = actions[i]
- if action then
- local h, ok = action(head)
- if ok then
- head = h
- end
- done = done or ok
- end
- end
- if done then
- return head
- else
- return true
- end
- else
- return head
- end
-end
+--~ function nodes.processors.action(head)
+--~ if head then
+--~ node.slide(head)
+--~ local done = false
+--~ local actions = nodes.processors.actions
+--~ for i=1,#actions do
+--~ local h, ok = actions[i](head)
+--~ if ok then
+--~ head, done = h, true
+--~ end
+--~ end
+--~ if done then
+--~ return head
+--~ else
+--~ return true
+--~ end
+--~ else
+--~ return head
+--~ end
+--~ end
lists = lists or { }
lists.plugins = lists.plugins or { }
-function nodes.process_lists(head)
- return nodes.process_attributes(head,lists.plugins)
-end
-
chars = chars or { }
chars.plugins = chars.plugins or { }
-function nodes.process_chars(head)
- return nodes.process_attributes(head,chars.plugins)
+--~ words = words or { }
+--~ words.plugins = words.plugins or { }
+
+callbacks.trace = false
+
+do
+
+ kernel = kernel or { }
+
+ local starttiming, stoptiming = input.starttiming, input.stoptiming
+ local hyphenate, ligaturing, kerning = lang.hyphenate, node.ligaturing, node.kerning
+
+ function kernel.hyphenation(head,tail) -- lang.hyphenate returns done
+ starttiming(kernel)
+ local done = hyphenate(head,tail)
+ stoptiming(kernel)
+ return head, tail, done
+ end
+ function kernel.ligaturing(head,tail) -- node.ligaturing returns head,tail,done
+ starttiming(kernel)
+ local head, tail, done = ligaturing(head,tail)
+ stoptiming(kernel)
+ return head, tail, done
+ end
+ function kernel.kerning(head,tail) -- node.kerning returns head,tail,done
+ starttiming(kernel)
+ local head, tail, done = kerning(head,tail)
+ stoptiming(kernel)
+ return head, tail, done
+ end
+
end
-nodes.processors.actions = { -- for the moment here, will change
- nodes.process_chars, -- attribute driven
- nodes.process_glyphs, -- font driven
- nodes.process_lists, -- attribute driven
-}
+callback.register('hyphenate' , function(head,tail) return tail end)
+callback.register('ligaturing', function(head,tail) return tail end)
+callback.register('kerning' , function(head,tail) return tail end)
-callback.register('pre_linebreak_filter', nodes.processors.action)
-callback.register('hpack_filter', nodes.processors.action)
+-- used to be loop, this is faster, called often; todo: shift up tail or even better,
+-- handle tail everywhere; for the moment we're safe
+
+do
+
+ local charplugins, listplugins = chars.plugins, lists.plugins
+
+ nodes.processors.actions = function(head,tail) -- removed: if head ... end
+ local ok, done = false, false
+ head, ok = nodes.process_attributes(head,charplugins) ; done = done or ok -- attribute driven
+ head, tail, ok = kernel.hyphenation (head,tail) ; done = done or ok -- language driven
+ head, ok = languages.words.check (head,tail) ; done = done or ok -- language driven
+ head, ok = nodes.process_characters(head) ; done = done or ok -- font driven
+ ok = nodes.protect_glyphs (head) ; done = done or ok -- turn chars into glyphs
+ head, tail, ok = kernel.ligaturing (head,tail) ; done = done or ok -- normal ligaturing routine / needed for base mode
+ head, tail, ok = kernel.kerning (head,tail) ; done = done or ok -- normal kerning routine / needed for base mode
+ head, ok = nodes.process_attributes(head,listplugins) ; done = done or ok -- attribute driven
+ return head, done
+ end
+
+end
+
+do
+
+ local actions = nodes.processors.actions
+ local first_character = node.first_character
+ local slide = node.slide
+
+ local function tracer(what,state,head,groupcode,glyphcount)
+ texio.write_nl(string.format("%s %s: group: %s, nodes: %s",
+ (state and "Y") or "N", what, groupcode or "?", nodes.count(head,true)))
+ end
+
+ function nodes.processors.pre_linebreak_filter(head,groupcode) -- todo: tail
+ local first, found = first_character(head)
+ if found then
+ if callbacks.trace then tracer("pre_linebreak",true,head,groupcode) end
+ local head, done = actions(head,slide(head))
+ return (done and head) or true
+ else
+ if callbacks.trace then tracer("pre_linebreak",false,head,groupcode) end
+ return true
+ end
+ end
+
+ function nodes.processors.hpack_filter(head,groupcode) -- todo: tail
+ local first, found = first_character(head)
+ if found then
+ if callbacks.trace then tracer("hpack",true,head,groupcode) end
+ local head, done = actions(head,slide(head))
+ return (done and head) or true
+ end
+ if callbacks.trace then tracer("hpack",false,head,groupcode) end
+ return true
+ end
+
+end
+
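+-- As far as we know these filters may return true (keep the list untouched)
+-- or a node (the new head), which explains the (done and head) or true above.
+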
+callback.register('pre_linebreak_filter', nodes.processors.pre_linebreak_filter)
+callback.register('hpack_filter' , nodes.processors.hpack_filter)
do
@@ -462,16 +520,40 @@ do
-- flat: don't use next, but indexes
-- verbose: also add type
+ -- can be sped up
+
+ function nodes.astable(n,sparse)
+ local f, t = node.fields(n.id,n.subtype), { }
+ for i=1,#f do
+ local v = f[i]
+ local d = n[v]
+ if d then
+ if v == "ref_count" or v == "id" then
+ -- skip
+ elseif expand[v] then -- or: type(n[v]) ~= "string" or type(n[v]) ~= "number" or type(n[v]) ~= "table"
+ t[v] = "pointer to list"
+ elseif sparse then
+ if (type(d) == "number" and d ~= 0) or (type(d) == "string" and d ~= "") then
+ t[v] = d
+ end
+ else
+ t[v] = d
+ end
+ end
+ end
+ t.type = node.type(n.id)
+ return t
+ end
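+
+ -- illustration only: a sparse snapshot of one node, handy when tracing
+ --
+ -- local t = nodes.astable(n,true)
+ -- texio.write_nl(table.serialize(t,node.type(n.id)))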
function nodes.totable(n,flat,verbose)
- local function totable(n,verbose)
+ local function totable(n)
local f = node.fields(n.id,n.subtype)
local tt = { }
for _,v in ipairs(f) do
if n[v] then
if v == "ref_count" then
-- skip
- elseif expand[v] then -- or: type(n[v]) ~= "string" or type(n[v]) ~= "number"
+ elseif expand[v] then -- or: type(n[v]) ~= "string" or type(n[v]) ~= "number" or type(n[v]) ~= "table"
tt[v] = nodes.totable(n[v],flat,verbose)
else
tt[v] = n[v]
@@ -487,12 +569,12 @@ do
if flat then
local t = { }
while n do
- t[#t+1] = totable(n,verbose)
+ t[#t+1] = totable(n)
n = n.next
end
return t
else
- local t = totable(n,verbose)
+ local t = totable(n)
if n.next then
t.next = nodes.totable(n.next,flat,verbose)
end
@@ -504,11 +586,7 @@ do
end
local function key(k)
- if type(k) == "number" then
- return "["..k.."]"
- else
- return k
- end
+ return ((type(k) == "number") and "["..k.."]") or k
end
local function serialize(root,name,handle,depth,m)
@@ -518,13 +596,14 @@ do
handle(("%s%s={"):format(depth,key(name)))
else
depth = ""
- if type(name) == "string" then
+ local tname = type(name)
+ if tname == "string" then
if name == "return" then
handle("return {")
else
handle(name .. "={")
end
- elseif type(name) == "number" then
+ elseif tname == "number" then
handle("[" .. name .. "]={")
else
handle("t={")
@@ -533,7 +612,7 @@ do
if root then
local fld
if root.id then
- fld = node.fields(root.id,root.subtype)
+ fld = node.fields(root.id,root.subtype) -- we can cache these (todo)
else
fld = table.sortedkeys(root)
end
@@ -541,13 +620,23 @@ do
handle(("%s %s=%q,"):format(depth,'type',root['type']))
end
for _,k in ipairs(fld) do
- if k then
+ if k == "ref_count" then
+ -- skip
+ elseif k then
local v = root[k]
local t = type(v)
if t == "number" then
+if v == 0 then
+ -- skip
+else
handle(("%s %s=%s,"):format(depth,key(k),v))
+end
elseif t == "string" then
+if v == "" then
+ -- skip
+else
handle(("%s %s=%q,"):format(depth,key(k),v))
+end
elseif v then -- userdata or table
serialize(v,k,handle,depth,m+1)
end
@@ -585,9 +674,22 @@ do
tex.print("\\stoptyping")
end
+ function nodes.check_for_leaks(sparse)
+ local l = { }
+ local q = node.usedlist()
+ for p in node.traverse(q) do
+ local s = table.serialize(nodes.astable(p,sparse),node.type(p.id))
+ l[s] = (l[s] or 0) + 1
+ end
+ node.flush_list(q)
+ for k, v in pairs(l) do
+ texio.write_nl(string.format("%s * %s", v, k))
+ end
+ end
+
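+ -- usage sketch: \ctxlua{nodes.check_for_leaks(true)} at the end of a run
+ -- reports how often each (serialized) node still sits in the used list
+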
end
-if not node.list_has_attribute then
+if not node.list_has_attribute then -- no longer needed
function node.list_has_attribute(list,attribute)
if list and attribute then
@@ -609,377 +711,48 @@ function nodes.pack_list(head)
return t
end
--- helpers
-
do
- local kern_node = node.new("kern",1)
- local penalty_node = node.new("penalty")
- local glue_node = node.new("glue")
- local glue_spec_node = node.new("glue_spec")
+ local glue, whatsit, hlist = node.id("glue"), node.id("whatsit"), node.id("hlist")
- function nodes.penalty(p)
- local n = node.copy(penalty_node)
- n.penalty = p
- return n
- end
- function nodes.kern(k)
- local n = node.copy(kern_node)
- n.kern = k
- return n
- end
- function nodes.glue(width,stretch,shrink)
- local n = node.copy(glue_node)
- local s = node.copy(glue_spec_node)
- s.width, s.stretch, s.shrink = width, stretch, shrink
- n.spec = s
- return n
+ function nodes.leftskip(n)
+ while n do
+ local id = n.id
+ if id == glue then
+ if n.subtype == 8 then -- 7 in c/web source
+ return (n.spec and n.spec.width) or 0
+ else
+ return 0
+ end
+ elseif id == whatsit then
+ n = n.next
+ elseif id == hlist then
+ return n.width
+ else
+ break
+ end
+ end
+ return 0
end
- function nodes.glue_spec(width,stretch,shrink)
- local s = node.copy(glue_spec_node)
- s.width, s.stretch, s.shrink = width, stretch, shrink
- return s
+ function nodes.rightskip(n)
+ if n then
+ n = node.slide(n)
+ while n do
+ local id = n.id
+ if id == glue then
+ if n.subtype == 9 then -- 8 in the c/web source
+ return (n.spec and n.spec.width) or 0
+ else
+ return 0
+ end
+ elseif id == whatsit then
+ n = n.prev
+ else
+ break
+ end
+ end
+ end
+ return false
end
end
-
--- old code
-
---~ function nodes.do_process_glyphs(stack)
---~ if not stack or #stack == 0 then
---~ return false
---~ elseif #stack == 1 then
---~ local node = stack[1]
---~ if node then
---~ local kind = node[1]
---~ if kind == 'glyph' then
---~ local tfmdata = fonts.tfm.id[node[5]] -- we can use fonts.tfm.processor_id
---~ if tfmdata and tfmdata.shared and tfmdata.shared.processors then
---~ for _, func in pairs(tfmdata.shared.processors) do -- per font
---~ func(stack,1,node)
---~ end
---~ end
---~ elseif kind == 'hlist' or kind == "vlist" then
---~ local done = nodes.do_process_glyphs(node[8])
---~ end
---~ return true
---~ else
---~ return false
---~ end
---~ else
---~ local font_ids = { }
---~ local done = false
---~ for _, v in pairs(stack) do
---~ if v then
---~ if v[1] == 'glyph' then
---~ local font_id = v[5]
---~ local tfmdata = fonts.tfm.id[font_id] -- we can use fonts.tfm.processor_id
---~ if tfmdata and tfmdata.shared and tfmdata.shared.processors then
---~ font_ids[font_id] = tfmdata.shared.processors
---~ end
---~ end
---~ end
---~ end
---~ if done then
---~ return false
---~ else
---~ -- todo: generic loop before
---~ for font_id, _ in pairs(font_ids) do
---~ for _, func in pairs(font_ids[font_id]) do -- per font
---~ local i = 1
---~ while true do
---~ local node = stack[i]
---~ if node and node[1] == 'glyph' and node[5] == font_id then
---~ i = func(stack,i,node)
---~ end
---~ if i < #stack then
---~ i = i + 1
---~ else
---~ break
---~ end
---~ end
---~ end
---~ end
---~ for i=1, #stack do
---~ local node = stack[i]
---~ if node then
---~ if node[1] == 'hlist' or node[1] == "vlist" then
---~ nodes.do_process_glyphs(node[8])
---~ end
---~ end
---~ end
---~ return true
---~ end
---~ end
---~ end
-
---~ function nodes.do_process_glyphs(stack)
---~ local function process_list(node)
---~ local done = false
---~ if node and node[1] == 'hlist' or node[1] == "vlist" then
---~ local attributes = node[3]
---~ if attributes then
---~ if not attributes[1] then
---~ nodes.do_process_glyphs(node[8])
---~ attributes[1] = 1
---~ done = true
---~ end
---~ else
---~ nodes.do_process_glyphs(node[8])
---~ node[3] = { 1 }
---~ done = true
---~ end
---~ end
---~ return done
---~ end
---~ if not stack or #stack == 0 then
---~ return false
---~ elseif #stack == 1 then
---~ return process_list(stack[1])
---~ else
---~ local font_ids, found = { }, false
---~ for _, node in ipairs(stack) do
---~ if node and node[1] == 'glyph' then
---~ local font_id = node[5]
---~ local tfmdata = fonts.tfm.id[font_id] -- we can use fonts.tfm.processor_id
---~ if tfmdata and tfmdata.shared and tfmdata.shared.processors then
---~ font_ids[font_id], found = tfmdata.shared.processors, true
---~ end
---~ end
---~ end
---~ if not found then
---~ return false
---~ else
---~ -- we need func to report a 'done'
---~ local done = false
---~ for font_id, font_func in pairs(font_ids) do
---~ for _, func in pairs(font_func) do -- per font
---~ local i = 1
---~ while true do
---~ local node = stack[i]
---~ if node and node[1] == 'glyph' and node[5] == font_id then
---~ i = func(stack,i,node)
---~ done = true
---~ end
---~ if i < #stack then
---~ i = i + 1
---~ else
---~ break
---~ end
---~ end
---~ end
---~ end
---~ for _, node in ipairs(stack) do
---~ if node then
---~ done = done or process_list(node)
---~ end
---~ end
---~ return done
---~ end
---~ end
---~ end
-
---~ function nodes.process_glyphs(t,...)
---~ input.start_timing(nodes)
---~ local done = nodes.do_process_glyphs(t)
---~ if done then
---~ t = nodes.collapse(t)
---~ end
---~ input.stop_timing(nodes)
---~ nodes.report(t,done)
---~ if done then
---~ return t
---~ else
---~ return true
---~ end
---~ end
-
---~ function nodes.do_process_glyphs(stack)
---~ local function process_list(node)
---~ local done = false
---~ if node and node[1] == 'hist' or node[1] == "vlist" then
---~ local attributes = node[3]
---~ if attributes then
---~ if attributes[1] then
---~ else
---~ local content = node[8]
---~ if type(content) == "table" then
---~ nodes.do_process_glyphs(content)
---~ end
---~ attributes[1] = 1
---~ done = true
---~ end
---~ else
---~ nodes.do_process_glyphs(node[8])
---~ node[3] = { 1 }
---~ done = true
---~ end
---~ end
---~ return done
---~ end
---~ if not stack or #stack == 0 then
---~ return false
---~ elseif #stack == 1 then
---~ return process_list(stack[1])
---~ else
---~ local font_ids, found = { }, false
---~ for _, node in ipairs(stack) do
---~ if node and node[1] == 'glyph' then
---~ local font_id = node[5]
---~ local tfmdata = fonts.tfm.id[font_id] -- we can use fonts.tfm.processor_id
---~ if tfmdata and tfmdata.shared and tfmdata.shared.processors then
---~ font_ids[font_id], found = tfmdata.shared.processors, true
---~ end
---~ end
---~ end
---~ if not found then
---~ return false
---~ else
---~ -- we need func to report a 'done'
---~ local done = false
---~ for font_id, font_func in pairs(font_ids) do
---~ for _, func in pairs(font_func) do -- per font
---~ local i = 1
---~ while true do
---~ local node = stack[i]
---~ if node and node[1] == 'glyph' and node[5] == font_id then
---~ i = func(stack,i,node)
---~ done = true
---~ end
---~ if i < #stack then
---~ i = i + 1
---~ else
---~ break
---~ end
---~ end
---~ end
---~ end
---~ for _, node in ipairs(stack) do
---~ if node then
---~ done = done or process_list(node)
---~ end
---~ end
---~ return done
---~ end
---~ end
---~ end
-
---~ function nodes.process_glyphs(t,...)
---~ if status.output_active then
---~ return true
---~ else
---~ input.start_timing(nodes)
---~ local done = nodes.do_process_glyphs(t)
---~ if done then
---~ t = nodes.collapse(t)
---~ end
---~ input.stop_timing(nodes)
---~ nodes.report(t,done)
---~ if done then
---~ return t
---~ else
---~ return true
---~ end
---~ end
---~ end
-
---~ do
-
---~ local function do_process_glyphs(stack)
---~ if not stack or #stack == 0 then
---~ return false
---~ elseif #stack == 1 and stack[1][1] ~= 'glyph' then
---~ return false
---~ else
---~ local font_ids, found = { }, false
---~ local fti = fonts.tfm.id
---~ for _, node in ipairs(stack) do
---~ if node and node[1] == 'glyph' then
---~ local font_id = node[5]
---~ local tfmdata = fti[font_id] -- we can use fonts.tfm.processor_id
---~ if tfmdata and tfmdata.shared and tfmdata.shared.processors then
---~ font_ids[font_id], found = tfmdata.shared.processors, true
---~ end
---~ end
---~ end
---~ if not found then
---~ return false
---~ else
---~ -- we need func to report a 'done'
---~ local done = false
---~ for font_id, font_func in pairs(font_ids) do
---~ for _, func in pairs(font_func) do -- per font
---~ local i = 1
---~ while true do
---~ local node = stack[i]
---~ if node and node[1] == 'glyph' and node[5] == font_id then
---~ i = func(stack,i,node)
---~ done = true
---~ end
---~ if i < #stack then
---~ i = i + 1
---~ else
---~ break
---~ end
---~ end
---~ end
---~ end
---~ for _, node in ipairs(stack) do
---~ if node then
---~ done = done or process_list(node)
---~ end
---~ end
---~ return done
---~ end
---~ end
---~ end
-
---~ local function do_collapse_glyphs(stack,existing_t)
---~ if stack then
---~ local t = existing_t or { }
---~ for _, node in pairs(stack) do
---~ if node then
---~ if node[3] then node[3][1] = nil end -- remove status bit / 1 sec faster on 15 sec
---~ if node[1] == 'inline' then
---~ local nodes = node[4]
---~ if #nodes == 1 then
---~ t[#t+1] = nodes[1]
---~ else
---~ do_collapse_glyphs(nodes,t)
---~ end
---~ else
---~ t[#t+1] = node
---~ end
---~ else
---~ -- deleted node
---~ end
---~ end
---~ return t
---~ else
---~ return stack
---~ end
---~ end
-
---~ function nodes.process_glyphs(t,...)
---~ --~ print(...)
---~ if status.output_active then -- not ok, we need a generic blocker, pagebody ! / attr tex.attibutes
---~ return true
---~ else
---~ input.start_timing(nodes)
---~ local done = do_process_glyphs(t)
---~ if done then
---~ t = do_collapse_glyphs(t)
---~ end
---~ input.stop_timing(nodes)
---~ nodes.report(t,done)
---~ if done then
---~ --~ texio.write_nl("RETURNING PROCESSED LIST")
---~ return t
---~ else
---~ --~ texio.write_nl("RETURNING SIGNAL")
---~ return true
---~ end
---~ end
---~ end
-
---~ end
diff --git a/tex/context/base/page-flt.tex b/tex/context/base/page-flt.tex
index 75531fbd5..47b5fddb1 100644
--- a/tex/context/base/page-flt.tex
+++ b/tex/context/base/page-flt.tex
@@ -366,6 +366,8 @@
{\ConvertToConstant\doifelse{#4}\v!none
{\global\nofloatcaptiontrue}
{\global\nofloatcaptionfalse}}%
+ \doif{\floatcaptionparameter\c!number}\v!none % new
+ {\global\nofloatcaptiontrue}%
\ifemptyfloatcaption \ifnofloatnumber
\global\nofloatcaptiontrue
\fi \fi}
@@ -513,20 +515,20 @@
% beware: do not set the maximum width when location=left/right
-\def\setlocalfloatdimensions#1#2#3% experimental / #3 box number #4 prefix
+\def\setlocalfloatdimensions#1%
{\global\sidefloatshift \zeropoint % duplicate
\global\sidefloatmaximum\zeropoint\relax % duplicate
\ifextrafloatactions
\ifdim\sidefloatdownshift=\zeropoint\else
- #3\setbox#2\vbox
- {\vskip\sidefloatdownshift\nointerlineskip\box#3}%
+ \global\setbox\floatbox\vbox
+ {\vskip\sidefloatdownshift\nointerlineskip\box\floatbox}%
\fi
\doifsomething{\floatparameter\c!minwidth}
{\scratchdimen\floatparameter\c!minwidth\relax
- \ifdim\wd#2<\scratchdimen
- #3\setbox#2\hbox to \scratchdimen
+ \ifdim\wd\floatbox<\scratchdimen
+ \global\setbox\floatbox\hbox to \scratchdimen
{\doifnot{\floatparameter\c!location}\v!left \hss
- \box#2%
+ \box\floatbox%
\doifnot{\floatparameter\c!location}\v!right\hss}%
\fi}%
% todo: edge / spine
@@ -537,17 +539,17 @@
{\letvalue{\??fl\currentfloat\c!maxwidth}\rightmarginwidth}}}%
\doifsomething{\floatparameter\c!maxwidth}
{\scratchdimen\floatparameter\c!maxwidth\relax
- \ifdim\wd#2>\scratchdimen
+ \ifdim\wd\floatbox>\scratchdimen
\doifcommonelse{\v!inright,\v!rightmargin,\v!rightedge
\v!inleft,\v!leftmargin,\v!leftedge}{#1}
{\global\sidefloatmaximum\scratchdimen}
- {#3\setbox#2\hbox to \scratchdimen
+ {\global\setbox\floatbox\hbox to \scratchdimen
{\doifcommonelse{\v!right,\v!left}{#1}
{\doifnotinset\v!right{#1}\hss
- \box#2%
+ \box\floatbox
\doifnotinset\v!left{#1}\hss}%
{\doifnot{\floatparameter\c!location}\v!left\hss
- \box#2%
+ \box\floatbox
\doifnot{\floatparameter\c!location}\v!right\hss}}}%
\fi}%
\fi}
@@ -2031,7 +2033,7 @@
\par
\edef\floatcaptiondirectives{\floatparameter\c!location,\floatcaptionparameter\c!location}%
\ifparfloat\@EA\dosetparfloat\else\@EA\dosetpagfloat\fi{#1}{#2}{#3}%
- \setlocalfloatdimensions{#1}\floatbox\global % tzt arg 3/4 weg
+ \setlocalfloatdimensions{#1}%
\setbox\floatbox\hbox
{\dosavefloatdata\restoretextcolor{\box\floatbox}}%
\global\floatheight\ht\floatbox
diff --git a/tex/context/base/page-ini.tex b/tex/context/base/page-ini.tex
index 2582f2dc0..783a358c9 100644
--- a/tex/context/base/page-ini.tex
+++ b/tex/context/base/page-ini.tex
@@ -634,6 +634,7 @@
\ifx\mkprocesscolumncontents\undefined\let\mkprocesscolumncontents\gobbleoneargument\fi
\ifx\mkprocesspagecontents \undefined\let\mkprocesspagecontents \gobbleoneargument\fi
+\ifx\mkprocessboxcontents \undefined\let\mkprocessboxcontents \gobbleoneargument\fi
\def\normalejectpenalty{-\@M} \let\ejectpenalty\normalejectpenalty
\def\normalsuperpenalty{-\@MM} \let\superpenalty\normalsuperpenalty
diff --git a/tex/context/base/page-lin.lua b/tex/context/base/page-lin.lua
new file mode 100644
index 000000000..91412d84f
--- /dev/null
+++ b/tex/context/base/page-lin.lua
@@ -0,0 +1,232 @@
+if not modules then modules = { } end modules ['page-lin'] = {
+ version = 1.001,
+ comment = "companion to page-lin.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- experimental
+
+nodes = nodes or { }
+nodes.lines = nodes.lines or { }
+nodes.lines.data = nodes.lines.data or { } -- start step tag
+
+do
+
+ -- if there is demand for it, we can support multiple numbering streams
+ -- and use more than one attribute
+
+ local hlist, vlist, whatsit = node.id('hlist'), node.id('vlist'), node.id('whatsit')
+
+ local display_math = attributes.numbers['display-math'] or 121
+ local line_number = attributes.numbers['line-number'] or 131
+ local line_reference = attributes.numbers['line-reference'] or 132
+
+ local current_list = { }
+ local cross_references = { }
+ local chunksize = 250 -- not used in boxed
+
+ local has_attribute = node.has_attribute
+ local traverse_id = node.traverse_id
+ local copy = node.copy
+ local format = string.format
+ local sprint = tex.sprint
+
+ local data = nodes.lines.data
+
+ nodes.lines.scratchbox = nodes.lines.scratchbox or 0
+
+ -- cross referencing
+
+ function nodes.lines.number(n)
+ local cr = cross_references[n] or 0
+ cross_references[n] = nil
+ return cr
+ end
+
+ local function resolve(n,m)
+ while n do
+ local id = n.id
+ if id == whatsit then
+ local a = has_attribute(n,line_reference)
+ if a then
+ cross_references[a] = m
+ end
+ elseif id == hlist or id == vlist then
+ resolve(n.list,m)
+ end
+ n = n.next
+ end
+ end
+
+ -- boxed variant
+
+ nodes.lines.boxed = { }
+
+ function nodes.lines.boxed.register(configuration)
+ data[#data+1] = configuration
+ return #data
+ end
+ function nodes.lines.boxed.setup(n,configuration)
+ local d = data[n]
+ if d then
+ for k,v in pairs(configuration) do d[k] = v end
+ else
+ data[n] = configuration
+ end
+ return n
+ end
+
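+ -- illustration only (the real calls come from page-lin.mkiv):
+ --
+ -- local n = nodes.lines.boxed.register { start = 1, step = 1, tag = "demo" }
+ -- nodes.lines.boxed.setup(n, { start = 100 }) -- adapt the class afterwards
+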
+ local leftskip = nodes.leftskip
+
+ function nodes.lines.boxed.stage_one(n)
+ current_list = { }
+ local head = tex.box[n].list
+ local function check_number(n,a) -- move inline
+ local d = data[a]
+ if d then
+ local s = d.start
+ current_list[#current_list+1] = { n, s }
+ sprint(tex.ctxcatcodes, format("\\makenumber{%s}{%s}{%s}{%s}{%s}\\endgraf", d.tag or "", s, n.shift, n.width, leftskip(n.list)))
+ d.start = s + (d.step or 1)
+ end
+ end
+ for n in traverse_id(hlist,head) do -- attr test here and quit as soon as zero found
+ if n.height == 0 and n.depth == 0 then
+ -- skip funny hlists
+ else
+ local a = has_attribute(n.list,line_number)
+ if a and a > 0 then
+ if has_attribute(n,display_math) then
+ if nodes.is_display_math(n) then
+ check_number(n,a)
+ end
+ else
+ if node.first_character(n.list) then
+ check_number(n,a)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ function nodes.lines.boxed.stage_two(n,m)
+ m = m or nodes.lines.scratchbox
+ local t, i = { }, 0
+ for l in traverse_id(hlist,tex.box[m].list) do
+ t[#t+1] = copy(l)
+ end
+ for _, l in ipairs(current_list) do
+ local n, m = l[1], l[2]
+ i = i + 1
+ t[i].next = n.list
+ n.list = t[i]
+ resolve(n,m)
+ end
+ end
+
+ -- flow variant
+ --
+ -- it's too hard to make this one robust, so for the moment it's not
+ -- available; todo: line refs
+
+ if false then
+
+ nodes.lines.flowed = { }
+
+ function nodes.lines.flowed.prepare()
+ for i=1,#data do
+ sprint(tex.ctxcatcodes,format("\\ctxlua{nodes.lines.flowed.prepare_a(%s)}\\ctxlua{nodes.lines.flowed.prepare_b(%s)}",i, i))
+ end
+ end
+
+ function nodes.lines.flowed.prepare_a(i)
+ local d = data[i]
+ local p = d.present
+ if p < chunksize then
+ local b = nodes.lines.scratchbox
+ sprint(tex.ctxcatcodes, format("{\\forgetall\\global\\setbox%s=\\vbox{\\unvbox%s\\relax\\offinterlineskip", b, b))
+ while p < chunksize do
+ sprint(tex.ctxcatcodes, format("\\mkmaketextlinenumber{%s}{%s}\\endgraf",d.start,1))
+ p = p + 1
+ d.start = d.start + d.step
+ end
+ d.present = p
+ sprint(tex.ctxcatcodes, "}}")
+ end
+ end
+
+ function nodes.lines.flowed.prepare_b(i)
+ local d = data[i]
+ local b = nodes.lines.scratchbox
+ local l = tex.box[b]
+ if l then
+ l = l.list
+ local n = d.numbers
+ while l do
+ if l.id == hlist then
+ local m = node.copy(l)
+ m.next = nil
+ if n then
+ n.next = m
+ else
+ d.numbers = m
+ end
+ n = m
+ end
+ l = l.next
+ end
+ end
+ tex.box[b] = nil
+ end
+
+ function nodes.lines.flowed.cleanup(i)
+ if i then
+ node.flush_list(data[i].numbers)
+ else
+ for i=1,#data do
+ node.flush_list(data[i].numbers)
+ end
+ end
+ end
+
+ function nodes.lines.flowed.apply(head)
+ local function check_number(n,a)
+ local d = data[a]
+ if d then
+ local m = d.numbers
+ if m then
+ d.numbers = m.next
+ m.next = n.list
+ n.list = m
+ d.present = d.present - 1
+ end
+ end
+ end
+ for n in node.traverse(head) do
+ local id = n.id
+ if id == hlist then
+ if n.height == 0 and n.depth == 0 then
+ -- skip funny hlists
+ else
+ local a = has_attribute(n,line_number)
+ if a and a > 0 then
+ if has_attribute(n,display_math) then
+ if nodes.is_display_math(n) then
+ check_number(n,a)
+ end
+ else
+ check_number(n,a)
+ end
+ end
+ end
+ end
+ end
+ return head, true
+ end
+
+ end
+
+end
diff --git a/tex/context/base/page-lin.tex b/tex/context/base/page-lin.mkii
index 66a09527c..357283252 100644
--- a/tex/context/base/page-lin.tex
+++ b/tex/context/base/page-lin.mkii
@@ -26,95 +26,6 @@
\newtoks\beforeeverylinenumbering
\newtoks\aftereverylinenumbering
-\def\setuplines
- {\dodoubleargument\getparameters[\??rg]}
-
-\def\startlines
- {\@@rgbefore
- \pushmacro\checkindentation
- \whitespace
- %\page[\v!preference]} gaat mis na koppen, nieuw: later \nobreak
- \begingroup
- \setupindenting[\@@rgindenting]%
- \typesettinglinestrue
- \setupwhitespace[\v!none]%
- \obeylines
- \ignorespaces
- \gdef\afterfirstobeyedline % tzt two pass, net als opsomming
- {\gdef\afterfirstobeyedline
- {\nobreak
- \global\let\afterfirstobeyedline\relax}}%
- \def\obeyedline
- {\par
- \afterfirstobeyedline
- \futurelet\next\dobetweenthelines}%
- \activatespacehandler\@@rgspace
- \GotoPar}
-
-\def\stoplines
- {\endgroup
- \popmacro\checkindentation
- \@@rgafter}
-
-%D When spacing is active we need to handle commands in
-%D a special way:
-%D
-%D \starttyping
-%D \setuplines[space=on]
-%D
-%D \startlines
-%D Let's talk about this{\ttsl\gobbleoneargument or}that.
-%D \stoplines
-%D
-%D \startlines
-%D Let's talk about this{\getvalue{ttsl}or}that.
-%D \stoplines
-%D \stoptyping
-%D
-%D One can indent in several ways:
-%D
-%D \starttyping
-%D \setupindenting[medium] \setuplines[indenting=odd] % no yes odd even
-%D
-%D \startlines
-%D first
-%D second
-%D third
-%D fourth
-%D \stoplines
-%D \stoptyping
-
-% this is not the natural place
-
-\def\installspacehandler#1#2% needs to set \obeyedspace
- {\setvalue{\??sr#1}{#2}}
-
-\installspacehandler \v!on
- {\obeyspaces
- \def\obeyedspace{\mathortext\normalspace{\dontleavehmode{\tt\controlspace}}}%
- \let\ =\obeyedspace}
-
-\installspacehandler \v!yes
- {\obeyspaces
- \def\obeyedspace{\mathortext\normalspace{\dontleavehmode \normalspace }}%
- \let\ =\obeyedspace}
-
-\installspacehandler \v!off
- {\normalspaces
- \let\obeyedspace\normalspace
- \let\ =\normalspace}
-
-\installspacehandler \v!fixed
- {\obeyspaces
- \def\obeyedspace{\mathortext\normalspace{\dontleavehmode\fixedspace}}%
- \let\ =\obeyedspace}
-
-\def\activatespacehandler#1%
- {\executeifdefined{\??sr#1}{\activatespacehandler\v!off}}
-
-\def\dobetweenthelines
- {\doifmeaningelse\next\obeyedline\@@rginbetween\donothing}
-
% using \setlocalreference saves a hash entry
\def\dodoshowlinenumber % for use elsewhere, to be extended
@@ -381,46 +292,6 @@
\the\aftereverylinenumbering
\egroup}
-\def\emptylines
- {\dosingleempty\doemptylines}
-
-\def\doemptylines[#1]%
- {\endgraf\dorecurse{\iffirstargument#1\else3\fi}\crlf}
-
-\newcount\internalparagraphnumber
-
-\def\setupparagraphnumbering
- {\dosingleempty\dosetupparagraphnumbering}
-
-\def\dosetupparagraphnumbering[#1]%
- {\getparameters
- [\??ph][#1]%
- \processaction
- [\@@phstate]
- [\v!start=>\let\showparagraphnumber\doshowparagraphnumberA,
- \v!stop=>\let\showparagraphnumber\relax,
- \v!line=>\let\showparagraphnumber\doshowparagraphnumberB,
- \v!reset=>\global\internalparagraphnumber\zerocount
- \let\showparagraphnumber\doshowparagraphnumberA]}
-
-\def\dodoshowparagraphnumber
- {\global\advance\internalparagraphnumber \plusone
- \inleftmargin % \tf normalizes em
- {\tf{\doattributes\??ph\c!style\c!color{\the\internalparagraphnumber}}%
- \kern\@@phdistance}}
-
-\def\doshowparagraphnumberA
- {\ifprocessingverbatim
- \iflinepar\dodoshowparagraphnumber\fi
- \else
- \dodoshowparagraphnumber
- \fi}
-
-\def\doshowparagraphnumberB
- {\ifnumberinglines
- \doshowparagraphnumberA
- \fi}
-
\setuplinenumbering
[\c!method=,
\c!conversion=\v!numbers,
@@ -429,31 +300,13 @@
\c!location=\v!margin,
\c!style=,
\c!color=,
- \c!width=2em,
\c!prefix=,
- \c!referencing=\v!on]
-
-% new
-
-\setuplinenumbering
- [\c!width=\ifcase\linenumberlocation2em\else\v!margin\fi,
+ \c!referencing=\v!on,
+ \c!width=\ifcase\linenumberlocation2em\else\v!margin\fi,
\c!left=,
\c!right=,
\c!command=,
\c!distance=\zeropoint,
\c!align=\ifcase\linenumberlocation\v!right\or\v!right\or\v!left\fi]
-\setupparagraphnumbering
- [\c!state=\v!stop,
- \c!style=,
- \c!color=,
- \c!distance=\ifcase\linenumberlocation2em\else\!!zeropoint\fi]
-
-\setuplines
- [\c!before=\blank,
- \c!after=\blank,
- \c!inbetween=\blank,
- \c!indenting=\v!no,
- \c!space=\v!default]
-
\protect \endinput
diff --git a/tex/context/base/page-lin.mkiv b/tex/context/base/page-lin.mkiv
new file mode 100644
index 000000000..d442bbfeb
--- /dev/null
+++ b/tex/context/base/page-lin.mkiv
@@ -0,0 +1,424 @@
+%D \module
+%D [ file=page-lin,
+%D version=2007.11.29,
+%D title=\CONTEXT\ Core Macros,
+%D subtitle=Line Numbering,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{Context Core Macros / Line Numbering}
+
+\unprotect
+
+% low level interface
+
+\defineattribute[line-number]
+\defineattribute[line-reference]
+
+\registerctxluafile{page-lin}{1.001}
+% \ctxluafileload{page-lin}{1.001}
+
+\appendtoksonce\doresetattribute{line-number}\to\everyforgetall
+\appendtoksonce\dosetattribute{display-math}{1}\to\everybeforedisplayformula
+
+\newbox \linenumberscratchbox
+\newcount\linenumberchunk
+\newcount\linerefcounter
+
+\newconditional\tracelinenumbering
+
+\def\mkaddtextlinenumbers#1#2#3% box col max
+ {\bgroup
+ \ifcase#3\relax
+ \let\makenumber\mkskiplinenumber
+ \or
+ \let\makenumber\mkleftlinenumber
+ \else\ifcase#2\relax
+ \let\makenumber\mkskiplinenumber
+ \or
+ \let\makenumber\mkleftlinenumber
+ \else
+ \let\makenumber\mkrightlinenumber
+ \fi\fi
+ \mkprocesstextlinenumbers{#1}%
+ \egroup}
+
+\def\mkprocesstextlinenumbers#1%
+ {\setbox\linenumberscratchbox\vbox{\forgetall\offinterlineskip\ctxlua{nodes.lines.boxed.stage_one(\number#1)}}%
+ \ctxlua{nodes.lines.boxed.stage_two(\number#1,\number\linenumberscratchbox)}}% can move to lua code
+
+% id nr shift width leftskip
+
+\def\mkskiplinenumber #1#2#3#4#5{}
+\def\mkleftlinenumber #1#2#3#4#5{\hbox{\llap{#2\quad\hskip#3\scaledpoint}}}
+\def\mkrightlinenumber#1#2#3#4#5{\hbox{\rlap{\hskip#4\scaledpoint\hskip#3\scaledpoint\quad#2}}}
+
+\def\makenumber#1#2{\hbox{\llap{#1\quad\hskip#2\scaledpoint}}\endgraf}%
+
+\def\mkdoprocesspagecontents #1{\mkaddtextlinenumbers{#1}\plusone \plusone}
+\def\mkdoprocessboxcontents #1{\mkaddtextlinenumbers{#1}\plusone \plusone}
+\def\mkdoprocesscolumncontents#1{\mkaddtextlinenumbers{#1}\currentcolumn\nofcolumns}
+
+\def\mkcurrentstart{0}
+\def\mkcurrentstep {1}
+
+\def\mkdefinetextlinenumbering#1%
+ {\begingroup
+ \scratchcounter\ctxlua{tex.sprint(nodes.lines.boxed.register({start=\mkcurrentstart,step=\mkcurrentstep,tag="#1"}))}%
+ \setxvalue{ln:c:#1}{\number\scratchcounter}%
+ \endgroup}
+
+\def\mkstarttextlinenumbering#1#2%
+ {\globallet\mkprocesspagecontents \mkdoprocesspagecontents
+ \globallet\mkprocesscolumncontents\mkdoprocesscolumncontents
+ \ifcase#2\relax
+ % continue
+ \or
+ \mkdefinetextlinenumbering{#1}% restart
+ \fi
+ \dosetattribute{line-number}{\getvalue{ln:c:#1}}}
+
+\def\mksetuptextlinenumbering#1%
+ {\ctxlua{nodes.lines.boxed.setup(\getvalue{ln:c:#1},{start=\mkcurrentstart,step=\mkcurrentstep,tag="#1"})}}
+
+\def\mkstoptextlinenumbering
+ {\doresetattribute{line-number}}
+
+\def\mksomelinereference#1#2#3%
+ {\dontleavehmode\begingroup
+ \global\advance\linerefcounter\plusone
+ \dosetattribute{line-reference}\linerefcounter
+ #3\rawtextreference\s!lin{#2}{\noexpand\ctxlua{tex.sprint(nodes.lines.number(\the\linerefcounter))}}%
+ \endgroup}
+
+\def\mkstartlinereference#1{\mksomelinereference{#1}{lr:b:#1}{}\ignorespaces}
+\def\mkstoplinereference #1{\removeunwantedspaces\mksomelinereference{#1}{lr:e:#1}{}}
+
+\def\mklinestartreference#1[#2]{\in{#1}[lr:b:#2]} % not interfaced
+\def\mklinestopreference #1[#2]{\in{#1}[lr:e:#2]} % not interfaced
+
+% high level interface
+
+\newif\ifnumberinglines
+\newif\iftypesettinglines
+
+\let\currentlinenumbering\empty
+
+\chardef\linenumbermode = 1 % 0=continue, 1=restart
+\chardef\linenumberlocation = 1 % 0=middle, 1=left, 2=right, 3=inner, 4=outer, 5=text
+\chardef\linenumberalignment = 5 % 0=middle, 1=left, 2=right, 5=auto
+
+\newevery \beforeeverylinenumbering \relax
+\newevery \aftereverylinenumbering \relax
+\newevery \everylinenumber \relax
+
+\newdimen\linenumberwidth
+\newdimen\linenumberdistance
+
+\def\definelinenumbering
+ {\dosingleempty\dodefinelinenumbering}
+
+\def\dodefinelinenumbering[#1]%
+ {\def\currentlinenumbering{#1}%
+ \mkdefinetextlinenumbering\currentlinenumbering}
+
+\def\setuplinenumbering
+ {\dodoubleempty\dosetuplinenumbering}
+
+\def\dosetuplinenumbering[#1][#2]%
+ {\ifsecondargument
+ \def\currentlinenumbering{#1}%
+ \getparameters[\??rn#1][#2]%
+ \else
+ \let\currentlinenumbering\empty
+ \getparameters[\??rn][#1]%
+ \fi
+ \mksetuptextlinenumbering\currentlinenumbering}
+
+\def\linenumberparameter#1%
+ {\csname\??rn\ifcsname\??rn\currentlinenumbering#1\endcsname\currentlinenumbering\fi#1\endcsname}
+
+\def\linenumberattributes
+ {\doattributes{\??rn\ifcsname\??rn\currentlinenumbering\c!style\endcsname\currentlinenumbering\fi}}
+
+\definelinenumbering
+
+\setuplinenumbering
+ [\c!conversion=\v!numbers,
+ \c!start=1,
+ \c!step=1,
+ \c!continue=\v!no,
+ \c!location=\v!left,
+ \c!style=,
+ \c!color=,
+ \c!width=2em,
+ \c!left=,
+ \c!right=,
+ \c!command=,
+ \c!distance=\zeropoint,
+ \c!align=\v!auto]
+
+\def\startlinenumbering
+ {\dodoubleempty\dostartlinenumbering}
+
+% no intermediate changes in values, define a class, otherwise each range
+% would need a number
+
+\def\mkcurrentstart{\linenumberparameter\c!start}
+\def\mkcurrentstep {\linenumberparameter\c!step }
+
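+% A minimal usage sketch (illustration only, 'demo' is just an example tag):
+%
+% \definelinenumbering[demo]
+% \setuplinenumbering[demo][step=5,location=left]
+%
+% \startlinenumbering[demo]
+%     first line \par second line \par third line
+% \stoplinenumbering
+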
+% todo: text
+
+\expandafter\chardef\csname\??rn:l:\v!middle \endcsname \zerocount
+\expandafter\chardef\csname\??rn:l:\v!left \endcsname \plusone
+\expandafter\chardef\csname\??rn:l:\v!margin \endcsname \plusone
+\expandafter\chardef\csname\??rn:l:\v!inmargin\endcsname \plusone
+\expandafter\chardef\csname\??rn:l:\v!inleft \endcsname \plusone
+\expandafter\chardef\csname\??rn:l:\v!right \endcsname \plustwo
+\expandafter\chardef\csname\??rn:l:\v!inright \endcsname \plustwo
+\expandafter\chardef\csname\??rn:l:\v!inner \endcsname \plusthree
+\expandafter\chardef\csname\??rn:l:\v!outer \endcsname \plusfour
+\expandafter\chardef\csname\??rn:l:\v!text \endcsname \plusfive
+
+\expandafter\chardef\csname\??rn:a:\v!middle \endcsname \zerocount
+\expandafter\chardef\csname\??rn:a:\v!right \endcsname \plusone
+\expandafter\chardef\csname\??rn:a:\v!flushleft \endcsname \plusone
+\expandafter\chardef\csname\??rn:a:\v!left \endcsname \plustwo
+\expandafter\chardef\csname\??rn:a:\v!flushright\endcsname \plustwo
+\expandafter\chardef\csname\??rn:a:\v!auto \endcsname \plusfive
+
+\def\dostartlinenumbering[#1][#2]% todo: c!continue
+ {\begingroup
+ \chardef\linenumbermode\plusone
+ \let\currentlinenumbering\empty
+ \ifsecondargument
+ \doif{#2}\v!continue{\chardef\linenumbermode\zerocount}%
+ \else\iffirstargument
+ \doifelse{#1}\v!continue
+ {\chardef\linenumbermode\zerocount}
+ {\def\currentlinenumbering{#1}}%
+ \fi\fi
+ \doifelse{\linenumberparameter\c!continue}\v!yes
+ {\chardef\linenumbermode\zerocount}%
+ \numberinglinestrue
+ \the\beforeeverylinenumbering
+ \mkstarttextlinenumbering\currentlinenumbering\linenumbermode}
+
+\def\stoplinenumbering
+ {\mkstoptextlinenumbering
+ \the\aftereverylinenumbering
+ \endgroup}
+
+% number placement
+
+\let\mkskiplinenumber \gobblefivearguments
+
+\def\mkdoinnerlinenumber{\doifoddpageelse\mkdoleftlinenumber\mkdorightlinenumber}
+\def\mkdoouterlinenumber{\doifoddpageelse\mkdorightlinenumber\mkdoleftlinenumber}
+
+\def\mkleftlinenumber
+ {\ifcase\linenumberlocation
+ \expandafter\mkdoleftlinenumber
+ \or
+ \expandafter\mkdoleftlinenumber
+ \or
+ \expandafter\mkdoleftlinenumber
+ \or
+ \expandafter\mkdoinnerlinenumber
+ \or
+ \expandafter\mkdoouterlinenumber
+ \fi}
+
+\def\mkrightlinenumber
+ {\ifcase\linenumberlocation
+ \expandafter\mkdorightlinenumber
+ \or
+ \expandafter\mkdorightlinenumber
+ \or
+ \expandafter\mkdorightlinenumber
+ \or
+ \expandafter\mkdoouterlinenumber
+ \or
+ \expandafter\mkdoinnerlinenumber
+ \fi}
+
+\def\mkaddtextlinenumbers#1#2#3% box col max
+ {\bgroup
+ \ifcase#3\relax
+ \let\makenumber\mkskiplinenumber
+ \or
+ \let\makenumber\mkleftlinenumber
+ \else\ifcase#2\relax
+ \let\makenumber\mkskiplinenumber
+ \or
+ \let\makenumber\mkdoleftlinenumber
+ \ifcase\linenumberlocation\or
+ \chardef\linenumberlocation\plusone
+ \or
+ \chardef\linenumberlocation\plustwo
+ \or
+ \chardef\linenumberlocation\plusone
+ \or
+ \chardef\linenumberlocation\plusone
+ \or
+ \chardef\linenumberlocation\plusone
+ \fi
+ \else
+ \let\makenumber\mkdorightlinenumber
+ \ifcase\linenumberlocation\or
+ \chardef\linenumberlocation\plustwo
+ \or
+ \chardef\linenumberlocation\plusone
+ \or
+ \chardef\linenumberlocation\plustwo
+ \or
+ \chardef\linenumberlocation\plustwo
+ \fi
+ \fi\fi
+ \mkprocesstextlinenumbers{#1}%
+ \egroup}
+
+\def\mkdoleftlinenumber #1#2#3#4#5%
+ {\hbox{\llap{\dosomelinenumber{#1}{2}{#2}{#5}\hskip#3\scaledpoint}}}
+\def\mkdorightlinenumber#1#2#3#4#5%
+ {\hbox{\rlap{\hskip#4\scaledpoint\hskip#3\scaledpoint\dosomelinenumber{#1}{1}{#2}{#5}}}}
+
+\def\dosomelinenumber#1#2#3#4% tag 1=left|2=right linenumber leftskip
+ {\begingroup
+ \def\currentlinenumbering{#1}%
+ \chardef\linenumberlocation \executeifdefined{\??rn:l:\linenumberparameter\c!location}\plusone % left
+ \chardef\linenumberalignment\executeifdefined{\??rn:a:\linenumberparameter\c!align }\plusfive % auto
+ \doifelse{\linenumberparameter\c!width}\v!margin
+ {\linenumberwidth\leftmarginwidth}
+ {\linenumberwidth\linenumberparameter\c!width}%
+ \linenumberdistance\linenumberparameter\c!distance\relax
+ \ifcase#2\relax\or\hskip\linenumberdistance\fi\relax
+ \ifnum\linenumberlocation=\plusfive
+ \scratchdimen\dimexpr#4\scaledpoint-\linenumberdistance\relax
+ \chardef\linenumberlocation\plusone
+ \else
+ \scratchdimen\zeropoint
+ \fi
+ \ifcase\linenumberalignment
+ \chardef\linenumberlocation\zerocount % middle
+ \or
+ \chardef\linenumberlocation\plusone % left
+ \or
+ \chardef\linenumberlocation\plustwo % right
+ \fi
+ \ifconditional\tracelinenumbering\ruledhbox\else\hbox\fi to \linenumberwidth
+ {\ifcase\linenumberlocation
+ \hss % middle
+ \or
+ % left
+ \or
+ \hss % right
+ \or
+ \doifoddpageelse\relax\hss % inner
+ \or
+ \doifoddpageelse\hss\relax % outer
+ \fi
+ \linenumberattributes\c!style\c!color
+ {\linenumberparameter\c!command
+ {\linenumberparameter\c!left
+ \convertnumber{\linenumberparameter\c!conversion}{#3}%
+ \linenumberparameter\c!right}}%
+ \ifcase\linenumberlocation
+ \hss % middle
+ \or
+ \hss % left
+ \or
+ % right
+ \or
+ \doifoddpageelse\hss\relax % inner
+ \or
+ \doifoddpageelse\relax\hss % outer
+ \fi}%
+ \ifcase#2\relax\or\or\hskip\linenumberdistance\fi\relax
+ \hskip-\scratchdimen
+ \the\everylinenumber
+ \endgroup}
+
+% left right inner outer
+
+% align: \alignedline\@@rnalign\v!right{\box0\hskip\@@rndistance}
+
+% referencing
+
+\def\someline [#1]{\mkstartlinereference{#1}\mkstoplinereference{#1}}
+\def\startline[#1]{\mkstartlinereference{#1}}
+\def\stopline [#1]{\mkstoplinereference {#1}}
+
+\def\mkshowstartlinereference#1%
+ {\ifconditional\tracelinenumbering
+ \setbox\scratchbox\hbox{\llap
+ {\vrule\!!width\onepoint\!!depth\strutdp\!!height.8\strutht\raise.85\strutht\hbox{\llap{\tt\txx#1}}}}%
+ \smashbox\scratchbox\box\scratchbox
+ \fi}
+\def\mkshowstoplinereference#1%
+ {\ifconditional\tracelinenumbering
+ \setbox\scratchbox\hbox{\rlap
+ {\raise.85\strutht\hbox{\rlap{\tt\txx#1}}\vrule\!!width\onepoint\!!depth\strutdp\!!height.8\strutht}}%
+ \smashbox\scratchbox\box\scratchbox
+ \fi}
+
+\def\mkstartlinereference#1{\mksomelinereference{#1}{lr:b:#1}{\mkshowstartlinereference{#1}}\ignorespaces}
+\def\mkstoplinereference #1{\removeunwantedspaces\mksomelinereference{#1}{lr:e:#1}{\mkshowstoplinereference{#1}}}
+
+% eventually we will do this in lua
+
+\def\doifelsesamelinereference#1#2#3%
+ {\doifreferencefoundelse{lr:b:#1}
+ {\let\fline\currenttextreference
+ \doifreferencefoundelse{lr:e:#1}
+ {\let\tline\currenttextreference
+ \ifx\fline\tline#2\else#3\fi}
+ {#2}}
+ {#2}}
+
+\def\inline#1[#2]%
+ {\doifelsenothing{#1}
+ {\doifelsesamelinereference{#2}
+ {\in{\leftlabeltext\v!line}{\rightlabeltext\v!line}[lr:b:#2]}
+ {\in{\leftlabeltext\v!lines}{}[lr:b:#2]--\in{}{\rightlabeltext\v!lines}[lr:e:#2]}}
+ {\doifelsesamelinereference{#2}
+ {\in{#1}[lr:b:#2]}
+ {\in{#1}[lr:b:#2]--\in[lr:e:#2]}}}
+
+\protect \endinput
+
+\iffalse % \iftrue
+
+ \appendtoks\ctxlua{nodes.lines.flowed.prepare()}\to\everyshipout
+ \appendtoks\ctxlua{nodes.lines.flowed.cleanup()}\to\everybye
+
+ \def\mkstarttextlinenumbering#1#2%
+ {\ctxlua{nodes.lines.flowed.prepare(#1)}%
+ \dosetattribute{line-number}{#1}}
+
+ \def\mkstoptextlinenumbering
+ {\doresetattribute{line-number}}
+
+ \def\mkmaketextlinenumber#1#2%
+ {\hbox \bgroup
+ \ifcase#2\relax
+ %
+ \or
+ \llap{#1\quad\hskip#2\scaledpoint}%
+ \else
+ \rlap{\hskip\textwidth\quad#1\hskip#2\scaledpoint}%
+ \fi
+ \egroup}
+
+ \ctxlua{
+ nodes.lines.scratchbox = \number\linenumberscratchbox ;
+ % callback.register('vpack_filter', nodes.lines.flowed.apply)
+ callback.register('post_linebreak_filter', nodes.lines.flowed.apply)
+ }
+
+\fi
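
The disabled block above sketches the Lua route: a post_linebreak_filter sees the typeset lines right after the paragraph builder, which is where line numbers can be attached. A minimal, stand-alone illustration of that callback shape (not the nodes.lines.flowed code itself, and registered directly rather than through ConTeXt's callback management) could look like this:

    -- a hedged sketch: count the hlist nodes (typeset lines) that the
    -- paragraph builder hands to a post_linebreak_filter
    local hlist_id = node.id("hlist")

    callback.register("post_linebreak_filter", function(head, groupcode)
        local n = 0
        for line in node.traverse_id(hlist_id, head) do
            n = n + 1
        end
        texio.write_nl("lines in this paragraph: " .. n)
        return true -- true means: keep the node list unchanged
    end)
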
diff --git a/tex/context/base/page-mul.tex b/tex/context/base/page-mul.tex
index 8efa1af18..5cd66a420 100644
--- a/tex/context/base/page-mul.tex
+++ b/tex/context/base/page-mul.tex
@@ -580,7 +580,6 @@
\edef\maxcolumndepth{\the\dp\currentcolumnbox}%
\fi}}
-
\chardef\multicolumntopflushmethod\plusone % 0: no correction, 1: correction when topstuff, 2: correction, 3: correction++
\chardef\multicolumntopalignmethod\plustwo % 0: nothing, 1: force grid, 2: follow grid
@@ -603,6 +602,7 @@
\setfalse\someprecolumncontent % will be set elsewhere
\else
\settrue\someprecolumncontent
+\mkprocessboxcontents\precolumnbox
\fi
\forgetall
\setmulticolumnsout
diff --git a/tex/context/base/page-par.tex b/tex/context/base/page-par.tex
new file mode 100644
index 000000000..fa1723d37
--- /dev/null
+++ b/tex/context/base/page-par.tex
@@ -0,0 +1,58 @@
+%D \module
+%D [ file=page-par, % copied from page-lin
+%D version=1997.03.31,
+%D title=\CONTEXT\ Core Macros,
+%D subtitle=Paragraph Numbering,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{Context Core Macros / Paragraph Numbering}
+
+\unprotect
+
+\newcount\internalparagraphnumber
+
+\def\setupparagraphnumbering
+ {\dosingleempty\dosetupparagraphnumbering}
+
+\def\dosetupparagraphnumbering[#1]%
+ {\getparameters
+ [\??ph][#1]%
+ \processaction
+ [\@@phstate]
+ [\v!start=>\let\showparagraphnumber\doshowparagraphnumberA,
+ \v!stop=>\let\showparagraphnumber\relax,
+ \v!line=>\let\showparagraphnumber\doshowparagraphnumberB,
+ \v!reset=>\global\internalparagraphnumber\zerocount
+ \let\showparagraphnumber\doshowparagraphnumberA]}
+
+\def\dodoshowparagraphnumber
+ {\global\advance\internalparagraphnumber \plusone
+ \inleftmargin % \tf normalizes em
+ {\tf{\doattributes\??ph\c!style\c!color{\the\internalparagraphnumber}}%
+ \kern\@@phdistance}}
+
+\def\doshowparagraphnumberA
+ {\ifprocessingverbatim
+ \iflinepar\dodoshowparagraphnumber\fi
+ \else
+ \dodoshowparagraphnumber
+ \fi}
+
+\def\doshowparagraphnumberB
+ {\ifnumberinglines
+ \doshowparagraphnumberA
+ \fi}
+
+\setupparagraphnumbering
+ [\c!state=\v!stop,
+ \c!style=,
+ \c!color=,
+ \c!distance=\ifcase\linenumberlocation2em\else\!!zeropoint\fi] % will change
+
+\protect \endinput
diff --git a/tex/context/base/regi-ini.lua b/tex/context/base/regi-ini.lua
index 07dd54c1f..a12fd24a2 100644
--- a/tex/context/base/regi-ini.lua
+++ b/tex/context/base/regi-ini.lua
@@ -57,18 +57,21 @@ function regimes.load(regime)
end
function regimes.translate(line,regime)
- if regime and line and regimes.utf[regime] then
- return line:gsub("(.)", regimes.utf[regime])
- else
- return line
+ if regime and line then
+ local rur = regimes.utf[regime]
+ if rur then
+ return line:gsub("(.)", rur) -- () redundant
+ end
end
+ return line
end
function regimes.enable(regime)
if regimes.data[regime] then
regimes.currentregime = regime
+ local translate = regimes.translate
input.filters.dynamic_translator = function(s)
- return regimes.translate(s,regimes.currentregime)
+ return translate(s,regime)
end
else
regimes.disable()
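
For reference, the byte-wise translation that regimes.translate performs boils down to Lua's gsub with a table argument: every byte that has an entry in the mapping is replaced, the rest is left alone. A small self-contained sketch (the mapping table and names below are made up, not the real regime data):

    -- hypothetical one-entry mapping: latin-1 byte 0xE9 to utf-8 "é"
    local mapping = { ["\233"] = "é" }

    local function translate(line, map)
        if line and map then
            return (line:gsub(".", map)) -- parentheses drop gsub's count
        end
        return line
    end

    print(translate("caf\233", mapping)) --> café (on a utf-8 terminal)
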
diff --git a/tex/context/base/regi-ini.mkii b/tex/context/base/regi-ini.mkii
index e7c2a6792..a5b2cf177 100644
--- a/tex/context/base/regi-ini.mkii
+++ b/tex/context/base/regi-ini.mkii
@@ -45,7 +45,14 @@
\let\mkwalkregime \gobbleoneargument
\let\mkautosetregime\gobbletwoarguments
- \def\mkenableregime#1{\XeTeXinputencoding "#1"\relax}
+ % \def\mkenableregime#1%
+ % {\XeTeXinputencoding "#1"\relax}
+
+ \def\mkenableregime#1%
+ {\doifelse{#1}{utf}%
+ {\writestatus\m!regime{mapping utf to utf-8}%
+ \XeTeXinputencoding{utf-8}}
+ {\XeTeXinputencoding{#1}}}
\endXETEX
diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex
index 614895f2d..101d9bcdb 100644
--- a/tex/context/base/s-abr-01.tex
+++ b/tex/context/base/s-abr-01.tex
@@ -42,6 +42,7 @@
\logo [BLUESKY] {BlueSky}
\logo [BMP] {bmp}
\logo [BSD] {bsd}
+\logo [CCODE] {c}
\logo [CALCMATH] {CalcMath}
\logo [CD] {cd}
\logo [CDROM] {cdrom}
@@ -144,10 +145,12 @@
\logo [METATEX] {Meta\TeX}
\logo [MIKTEX] {Mik\TeX}
\logo [MLTEX] {ml\TeX}
+\logo [METATYPE] {MetaType1}
\logo [MODULA] {Modula}
\logo [MOV] {mov}
\logo [MPS] {mps}
\logo [MPTOPDF] {mptopdf}
+\logo [MPLIB] {mplib}
\logo [MSDOS] {msdos}
\logo [MSWINDOWS] {MS~Windows}
\logo [MTXRUN] {mtxrun}
@@ -216,6 +219,7 @@
\logo [TEXNL] {tex-nl}
\logo [TEXSHOW] {\TeX show}
\logo [TEXSPELL] {\TeX spell}
+\logo [TEXGYRE] {\TeX\ Gyre}
\logo [TEXSYNC] {texsync}
\logo [TEXTMATE] {TextMate}
\logo [TEXTOOLS] {\TeX tools}
diff --git a/tex/context/base/sort-ini.mkii b/tex/context/base/sort-ini.mkii
index f9e813d5c..6c904e8cc 100644
--- a/tex/context/base/sort-ini.mkii
+++ b/tex/context/base/sort-ini.mkii
@@ -124,6 +124,10 @@
\readsysfile{\f!sortprefix lan}\donothing\donothing
\egroup}}}
+\prependtoks
+ \savesortdefinitions
+\to \everysavesortkeys
+
% \defineregister[one]
% \defineregister[two] \setupregister[two][language=cz]
%
diff --git a/tex/context/base/spec-tpd.tex b/tex/context/base/spec-tpd.tex
index 068ac8dfe..d2a1d73a7 100644
--- a/tex/context/base/spec-tpd.tex
+++ b/tex/context/base/spec-tpd.tex
@@ -539,7 +539,7 @@
\definespecial\dostartnonecolormode{\doPDFstartnonecolormode}
\definespecial\doregisternonecolor {\doPDFregisternonecolor}
-\def\doPDFregisterspotcolorname#1#2%
+\def\doPDFregisterspotcolorname#1#2% no need for escape in luatex
{\bgroup
\let\ascii\empty
\def\docommand##1%
diff --git a/tex/context/base/supp-pdf.tex b/tex/context/base/supp-pdf.tex
index 63dfb1f69..61f7b32e2 100644
--- a/tex/context/base/supp-pdf.tex
+++ b/tex/context/base/supp-pdf.tex
@@ -602,86 +602,8 @@
\expandafter\dohandleMPsequenceC
\fi#1}
-%\def\dohandleMPsequenceA#1 %
-% {\setMPargument{#1}%
-% \handleMPsequence}
-
\let\dohandleMPsequenceA\setMPsequence
-% \def\dohandleMPsequenceB#1 %
-% {\edef\somestring{#1}%
-% \ifx\somestring\PSmoveto
-% \edef\lastMPmoveX{\gMPa1}%
-% \edef\lastMPmoveY{\gMPa2}%
-% \PDFcode{\!MPgMPa1 \!MPgMPa2 m}%
-% \resetMPstack
-% \else\ifx\somestring\PSnewpath
-% \let\handleMPsequence\handleMPpath
-% \else\ifx\somestring\PSgsave
-% \PDFcode{q}%
-% \resetMPstack
-% \else\ifx\somestring\PSgrestore
-% \PDFcode{Q}%
-% \resetMPstack
-% \else\ifx\somestring\PSdtransform % == setlinewidth
-% \let\handleMPsequence\handleMPdtransform
-% % after that we will encounter more tokens until setlinewidth+pop
-% % or pop+setlinewidth which we catch next; we explicitly need to
-% % reset the stack since [] n setdash may follow; a more clever
-% % approach would be to read on till the condition is met, but it's
-% % the only pop / setlinewidth we will encounter so ...
-% \else\ifx\somestring\PSsetlinewidth
-% % already handled in dtransform
-% \resetMPstack
-% \else\ifx\somestring\PSpop
-% % already handled in dtransform
-% \resetMPstack
-% \else\ifx\somestring\PSconcat
-% \cleanupMPconcat
-% \PDFcode{\gMPa1 \gMPa2 \gMPa3 \gMPa4 \gMPa5 \gMPa6 cm}%
-% \resetMPstack
-% \else\ifx\somestring\PSsetrgbcolor
-% \handleMPrgbcolor
-% \resetMPstack
-% \else\ifx\somestring\PSsetcmykcolor
-% \handleMPcmykcolor
-% \resetMPstack
-% \else\ifx\somestring\PSsetgray
-% \handleMPgraycolor
-% \resetMPstack
-% \else\ifx\somestring\PStranslate
-% \PDFcode{1 0 0 1 \gMPa1 \gMPa2 cm}%
-% \resetMPstack
-% \else\ifx\somestring\PSsetdash
-% \handleMPsetdash
-% \resetMPstack
-% \else\ifx\somestring\PSsetlinejoin
-% \PDFcode{\gMPa1 j}%
-% \resetMPstack
-% \else\ifx\somestring\PSsetmiterlimit
-% \PDFcode{\gMPa1 M}%
-% \resetMPstack
-% \else\ifx\somestring\PSfshow
-% \PDFcode{n}%
-% \handleMPfshow
-% \resetMPstack
-% \else\ifx\somestring\PSsetlinecap
-% \PDFcode{\gMPa1 J}%
-% \resetMPstack
-% \else\ifx\somestring\PSrlineto
-% \PDFcode{\!MP\lastMPmoveX\space\!MP\lastMPmoveY\space l S}%
-% \resetMPstack
-% \else\ifx\somestring\PSscale
-% \PDFcode{\gMPa1 0 0 \gMPa2 0 0 cm}%
-% \resetMPstack
-% \else\ifx\somestring\PSspecial
-% \handleMPspecialcommand
-% \resetMPstack
-% \else
-% \handleMPgraphic% {#1}%
-% \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi
-% \handleMPsequence}
-
\def\installMPSkeywordN#1#2%
{\expandafter\def\csname\@@MP:N:#1\endcsname{#2}}
@@ -969,11 +891,48 @@
%D finally I saw the light. It proved that we also had to
%D take care of \type{(split arguments)}.
+% \def\setMPfshowfont#1#2%
+% {\font\temp=#1\space at #2\relax\temp}
+
+% \startMPcode
+% draw btex Ga toch effe f\kern0ptietsen?{}` etex ;
+% \stopMPcode
+
+\newtoks \everyMPshowfont
+
\def\setMPfshowfont#1#2%
- {\font\temp=#1\space at #2\relax\temp}
+ {\font\temp=#1\space at #2\relax\temp
+ \the\everyMPshowfont}
\let\MPfshowcommand\empty
+%D The next hackery handles characters one by one. We only support this
+%D for the latest greatest \METAPOST\ binaries, the ones that escape
+%D problematic chars.
+
+\def\doflushMPtext#1%
+ {\edef\!!stringa{#1}%
+ \@EA\dodoflushMPtext\!!stringa\relax}
+
+\def\dodoflushMPtext
+ {\afterassignment\dododoflushMPtext\let\nexttoken=}
+
+\def\dododoflushMPtext
+ {\ifx\nexttoken\relax
+ % done
+ \else\ifx\nexttoken\char
+ \@EA\@EA\@EA\dodododoflushMPtext
+ \else
+ {\nexttoken}%
+ \@EA\@EA\@EA\dodoflushMPtext
+ \fi\fi}
+
+\def\dodododoflushMPtext
+ {\afterassignment\dododododoflushMPtext\scratchcounter}
+
+\def\dododododoflushMPtext
+ {{\char\scratchcounter}\let\next\dodoflushMPtext}
+
\def\dohandleMPfshow
{\bgroup
\setbox\scratchbox\hbox
@@ -1002,7 +961,7 @@
\MPfshowcommand
{\ifnum\nofMParguments=1
\def\do(##1){##1}%
- \dogMPa1%
+ \doflushMPtext{\dogMPa1}% only latest mp gets this treatment
\else
% we need to catch ( a ) (a a a) (\123 \123 \123) etc
\scratchcounter1
@@ -1162,7 +1121,7 @@
\or
\PDFcode{\!MPgMPs1 \!MPgMPs2 \!MPgMPs3 \!MPgMPs4 \!MPgMPs5 \!MPgMPs6 c}%
\or
- \PDFcode{\!MP\lastMPmoveX\space\!MP\lastMPmoveY\space l S}%
+ \PDFcode{\!MP\lastMPmoveX\space\!MP\lastMPmoveY\space l}%
\or
\edef\lastMPmoveX{\gMPs1}% evt \!MP here
\edef\lastMPmoveY{\gMPs2}%
@@ -2086,7 +2045,7 @@
{\ifcase\finiMPpath
\chardef\finiMPpath2
\let\handleMPsequence\processMPpath
- \fi}
+ \fi}
\installMPSkeywordP \PSstroke
{\ifcase\finiMPpath
\chardef\finiMPpath1
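
The long commented-out dispatcher removed above documented, operator by operator, how the MetaPost/PostScript output maps onto PDF graphics operators. As a reminder of that correspondence, a rough Lua table (invented names, not part of supp-pdf) might read:

    -- operators with a direct PDF counterpart; the path and color
    -- operators additionally carry their stack arguments along,
    -- e.g. "x y m", "x y l", "a b c d e f cm"
    local ps_to_pdf = {
        gsave         = "q",  -- save graphics state
        grestore      = "Q",  -- restore graphics state
        setlinejoin   = "j",
        setlinecap    = "J",
        setmiterlimit = "M",
    }

    local function pdfop(name)
        return ps_to_pdf[name] -- nil means: needs special handling
    end
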
diff --git a/tex/context/base/syst-con.lua b/tex/context/base/syst-con.lua
index 9f35d68b6..519808e17 100644
--- a/tex/context/base/syst-con.lua
+++ b/tex/context/base/syst-con.lua
@@ -16,13 +16,20 @@ the top of <l n='luatex'/>'s char range but outside the unicode range.</p>
do
local char, flush, format = unicode.utf8.char, tex.sprint, string.format
+ function converters.hexstringtonumber(n) flush(tonumber(n,16)) end
+ function converters.octstringtonumber(n) flush(tonumber(n, 8)) end
+ function converters.rawcharacter (n) flush(char(0x110000+n)) end
+
function converters.lchexnumber (n) flush(format("%x" ,n)) end
function converters.uchexnumber (n) flush(format("%X" ,n)) end
function converters.lchexnumbers (n) flush(format("%02x",n)) end
function converters.uchexnumbers (n) flush(format("%02X",n)) end
function converters.octnumber (n) flush(format("%03o",n)) end
- function converters.hexstringtonumber(n) flush(tonumber(n,16)) end
- function converters.octstringtonumber(n) flush(tonumber(n, 8)) end
- function converters.rawcharacter (n) flush(char(0x110000+n)) end
+
+ function converters.lchexnumber (n) flush(("%x" ):format(n)) end
+ function converters.uchexnumber (n) flush(("%X" ):format(n)) end
+ function converters.lchexnumbers (n) flush(("%02x"):format(n)) end
+ function converters.uchexnumbers (n) flush(("%02X"):format(n)) end
+ function converters.octnumber (n) flush(("%03o"):format(n)) end
end
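
A quick sanity check of the conversions registered above, in plain Lua outside TeX; both spellings in the diff (string.format versus the ("..."):format method call) produce the same output:

    print(tonumber("ff", 16))        --> 255
    print(tonumber("777", 8))        --> 511
    print(string.format("%X", 255))  --> FF
    print(("%03o"):format(8))        --> 010
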
diff --git a/tex/context/base/syst-etx.tex b/tex/context/base/syst-etx.tex
index 093c3d17f..5d7ab9a65 100644
--- a/tex/context/base/syst-etx.tex
+++ b/tex/context/base/syst-etx.tex
@@ -209,6 +209,8 @@
\def\newmarks {\myalloc@8\marks \mathchardef\@@maxallocation}
\def\newlanguage{\myalloc@9\language\chardef \@@minallocation}
+\def\topofboxstack{\number\count24 }
+
%D Since in \CONTEXT\ we only have one math family left we
%D redefine \type {\newfam}.
diff --git a/tex/context/base/syst-mtx.tex b/tex/context/base/syst-mtx.tex
index 4e5e2ef26..0abd89e57 100644
--- a/tex/context/base/syst-mtx.tex
+++ b/tex/context/base/syst-mtx.tex
@@ -43,6 +43,8 @@
\def\newfam#1{\chardef#1=15 }
+\def\topofboxstack{\number\count24 }
+
\count18=1
\mathchardef\@@minallocation = 16
diff --git a/tex/context/base/syst-omg.tex b/tex/context/base/syst-omg.tex
index 1bedb195b..0aa409ccb 100644
--- a/tex/context/base/syst-omg.tex
+++ b/tex/context/base/syst-omg.tex
@@ -31,6 +31,8 @@
\def\newfam#1{\chardef#1=15 }
+\def\topofboxstack{\number\count24 }
+
\count18=1
\mathchardef\@@minallocation = 16
diff --git a/tex/context/base/thrd-trg.tex b/tex/context/base/thrd-trg.tex
index 3fce672d5..dda81a8af 100644
--- a/tex/context/base/thrd-trg.tex
+++ b/tex/context/base/thrd-trg.tex
@@ -6,6 +6,12 @@
\unprotect
+% compare: \number 0.5 \number -0.5 \number 1.5 \number -1.5
+%
+% so we need:
+
+\def\realnumber#1{\withoutpt\the\dimexpr#1\s!pt\relax} % brrr
+
\chardef \@iv = 4
\chardef \@xc = 90 % was \nin@ty
\chardef \@clxx = 180
@@ -51,22 +57,22 @@
%D calculations.
\def\calculatesin#1%
- {{\expandafter\ifx\csname sin \number#1\endcsname\relax
+ {{\expandafter\ifx\csname sin \realnumber{#1}\endcsname\relax
\!!dimena#1\onepoint
\tg@@sin
- \expandafter\xdef\csname sin \number#1\endcsname{\withoutpt\the\!!dimena}%
+ \expandafter\xdef\csname sin \realnumber{#1}\endcsname{\withoutpt\the\!!dimena}%
\fi}}
\def\calculatecos#1%
- {{\expandafter\ifx\csname cos \number#1\endcsname\relax
+ {{\expandafter\ifx\csname cos \realnumber{#1}\endcsname\relax
\!!dimena\@xc\onepoint
\advance\!!dimena-#1\onepoint
\tg@@sin
- \expandafter\xdef\csname cos \number#1\endcsname{\withoutpt\the\!!dimena}%
+ \expandafter\xdef\csname cos \realnumber{#1}\endcsname{\withoutpt\the\!!dimena}%
\fi}}
\def\calculatetan#1%
- {{\expandafter\ifx\csname tan \number#1\endcsname\relax
+ {{\expandafter\ifx\csname tan \realnumber{#1}\endcsname\relax
\calculatesin{#1}%
\calculatecos{#1}%
\!!dimena\calculatedcos{#1}\onepoint
@@ -74,40 +80,40 @@
\!!dimenb\calculatedsin{#1}\onepoint
\!!dimenb\@xvi@k\!!dimenb
\divide\!!dimenb\!!dimena
- \expandafter\xdef\csname tan \number#1\endcsname{\withoutpt\the\!!dimenb}%
+ \expandafter\xdef\csname tan \realnumber{#1}\endcsname{\withoutpt\the\!!dimenb}%
\fi}}
%D The results are accessed with:
-\def\calculatedsin#1{\csname sin \number#1\endcsname}
-\def\calculatedcos#1{\csname cos \number#1\endcsname}
-\def\calculatedtan#1{\csname tan \number#1\endcsname}
+\def\calculatedsin#1{\csname sin \realnumber{#1}\endcsname}
+\def\calculatedcos#1{\csname cos \realnumber{#1}\endcsname}
+\def\calculatedtan#1{\csname tan \realnumber{#1}\endcsname}
%D A more safe implementation would be:
-\def\calculatedsin#1{\executeifdefined{sin \number#1}\!!zerocount}
-\def\calculatedcos#1{\executeifdefined{cos \number#1}\!!plusone}
-\def\calculatedtan#1{\executeifdefined{tan \number#1}\!!zerocount}
+\def\calculatedsin#1{\executeifdefined{sin \realnumber{#1}}\!!zerocount}
+\def\calculatedcos#1{\executeifdefined{cos \realnumber{#1}}\!!plusone }
+\def\calculatedtan#1{\executeifdefined{tan \realnumber{#1}}\!!zerocount}
%D A few values are predefined, although on today's systems there
%D is no real reason for that. I've added the 270 ones and changed
%D the -90 tan. Also, I prefer text (\type {\!!..}) instead of
%D counters \type {\..}.
-\expandafter\let\csname sin 0\endcsname\!!zerocount
-\expandafter\let\csname cos 0\endcsname\!!plusone
-\expandafter\let\csname sin 90\endcsname\!!plusone
-\expandafter\let\csname cos 90\endcsname\!!zerocount
-\expandafter\let\csname sin 180\endcsname\!!zerocount
-\expandafter\let\csname cos 180\endcsname\!!minusone
-\expandafter\let\csname sin 270\endcsname\!!minusone
-\expandafter\let\csname cos 270\endcsname\!!zerocount
+\expandafter\let\csname sin \realnumber{ 0}\endcsname\!!zerocount
+\expandafter\let\csname cos \realnumber{ 0}\endcsname\!!plusone
+\expandafter\let\csname sin \realnumber{ 90}\endcsname\!!plusone
+\expandafter\let\csname cos \realnumber{ 90}\endcsname\!!zerocount
+\expandafter\let\csname sin \realnumber{180}\endcsname\!!zerocount
+\expandafter\let\csname cos \realnumber{180}\endcsname\!!minusone
+\expandafter\let\csname sin \realnumber{270}\endcsname\!!minusone
+\expandafter\let\csname cos \realnumber{270}\endcsname\!!zerocount
-\expandafter\let\csname sin -90\endcsname\!!minusone
-\expandafter\let\csname cos -90\endcsname\!!zerocount
+\expandafter\let\csname sin \realnumber{-90}\endcsname\!!minusone
+\expandafter\let\csname cos \realnumber{-90}\endcsname\!!zerocount
-\expandafter\def\csname tan 90\endcsname{\writestatus\m!systems{infinite tan +90}}
-\expandafter\def\csname tan -90\endcsname{\writestatus\m!systems{infinite tan -90}}
+\expandafter\def\csname tan \realnumber{ 90}\endcsname{\writestatus\m!systems{infinite tan +90}}
+\expandafter\def\csname tan \realnumber{-90}\endcsname{\writestatus\m!systems{infinite tan -90}}
%D Usage: \type {\calculatesin{10}} and \type {\calculatedsin{10}}
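
The \csname-based caching above has a direct Lua analogue: compute once, store under a normalized key, reuse afterwards; the key normalization is the role that \realnumber plays for the TeX version. A small sketch with invented names, not ConTeXt code:

    local sincache = {}

    local function cachedsin(degrees)
        local key = tostring(degrees)
        local v = sincache[key]
        if v == nil then
            v = math.sin(math.rad(degrees))
            sincache[key] = v
        end
        return v
    end

    print(cachedsin(90)) --> 1 (computed once, then served from the cache)
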
diff --git a/tex/context/base/type-enc.tex b/tex/context/base/type-enc.tex
index 799eedbd5..d43f84326 100644
--- a/tex/context/base/type-enc.tex
+++ b/tex/context/base/type-enc.tex
@@ -30,7 +30,6 @@
% fallbacks, no math in latin modern
- \definefontsynonym[lmdunh10][cmdunh10]
\definefontsynonym[lmff10] [cmff10]
\definefontsynonym[lmfi10] [cmfi10]
\definefontsynonym[lmfib8] [cmfib8]
diff --git a/tex/context/base/type-one.tex b/tex/context/base/type-one.tex
index b724466a8..bc0d45027 100644
--- a/tex/context/base/type-one.tex
+++ b/tex/context/base/type-one.tex
@@ -246,6 +246,21 @@
\stoptypescript
\starttypescript [math] [modern,latin-modern]
+ \definefontsynonym [LMMathRoman5-Regular] [rm-lmr5]
+ \definefontsynonym [LMMathRoman6-Regular] [rm-lmr6]
+ \definefontsynonym [LMMathRoman7-Regular] [rm-lmr7]
+ \definefontsynonym [LMMathRoman8-Regular] [rm-lmr8]
+ \definefontsynonym [LMMathRoman9-Regular] [rm-lmr9]
+ \definefontsynonym [LMMathRoman10-Regular] [rm-lmr10]
+ \definefontsynonym [LMMathRoman12-Regular] [rm-lmr12]
+ \definefontsynonym [LMMathRoman17-Regular] [rm-lmr17]
+ \definefontsynonym [LMMathRoman5-Bold] [rm-lmbx5]
+ \definefontsynonym [LMMathRoman6-Bold] [rm-lmbx6]
+ \definefontsynonym [LMMathRoman7-Bold] [rm-lmbx7]
+ \definefontsynonym [LMMathRoman8-Bold] [rm-lmbx8]
+ \definefontsynonym [LMMathRoman9-Bold] [rm-lmbx9]
+ \definefontsynonym [LMMathRoman10-Bold] [rm-lmbx10]
+ \definefontsynonym [LMMathRoman12-Bold] [rm-lmbx12]
\definefontsynonym [LMMathSymbols5-BoldItalic] [lmbsy5]
\definefontsynonym [LMMathSymbols7-BoldItalic] [lmbsy7]
\definefontsynonym [LMMathSymbols10-BoldItalic][lmbsy10]
diff --git a/tex/context/base/type-otf.tex b/tex/context/base/type-otf.tex
index 7bfd1ee02..3e0f75961 100644
--- a/tex/context/base/type-otf.tex
+++ b/tex/context/base/type-otf.tex
@@ -188,6 +188,21 @@
\stoptypescript
\starttypescript [math] [modern,latin-modern]
+ \definefontsynonym [LMMathRoman5-Regular] [rm-lmr5]
+ \definefontsynonym [LMMathRoman6-Regular] [rm-lmr6]
+ \definefontsynonym [LMMathRoman7-Regular] [rm-lmr7]
+ \definefontsynonym [LMMathRoman8-Regular] [rm-lmr8]
+ \definefontsynonym [LMMathRoman9-Regular] [rm-lmr9]
+ \definefontsynonym [LMMathRoman10-Regular] [rm-lmr10]
+ \definefontsynonym [LMMathRoman12-Regular] [rm-lmr12]
+ \definefontsynonym [LMMathRoman17-Regular] [rm-lmr17]
+ \definefontsynonym [LMMathRoman5-Bold] [rm-lmbx5]
+ \definefontsynonym [LMMathRoman6-Bold] [rm-lmbx6]
+ \definefontsynonym [LMMathRoman7-Bold] [rm-lmbx7]
+ \definefontsynonym [LMMathRoman8-Bold] [rm-lmbx8]
+ \definefontsynonym [LMMathRoman9-Bold] [rm-lmbx9]
+ \definefontsynonym [LMMathRoman10-Bold] [rm-lmbx10]
+ \definefontsynonym [LMMathRoman12-Bold] [rm-lmbx12]
\definefontsynonym [LMMathSymbols5-BoldItalic] [lmbsy5]
\definefontsynonym [LMMathSymbols7-BoldItalic] [lmbsy7]
\definefontsynonym [LMMathSymbols10-BoldItalic][lmbsy10]
diff --git a/tex/context/base/type-tmf.tex b/tex/context/base/type-tmf.tex
index f4445209c..9783ad736 100644
--- a/tex/context/base/type-tmf.tex
+++ b/tex/context/base/type-tmf.tex
@@ -82,7 +82,7 @@
\stoptypescript
\starttypescript [math] [modern,computer-modern,latin-modern] [name]
- \definefontsynonym [MathRoman] [LMRoman-Regular]
+ \definefontsynonym [MathRoman] [LMMathRoman-Regular]
\definefontsynonym [MathExtension] [LMMathExtension-Regular]
\definefontsynonym [MathItalic] [LMMathItalic-Italic]
\definefontsynonym [MathSymbol] [LMMathSymbols-Italic]
@@ -111,14 +111,14 @@
\stoptypescript
\starttypescript [boldmath] [modern,computer-modern,latin-modern] [name]
- \definefontsynonym [MathRoman] [LMRoman-Bold]
+ \definefontsynonym [MathRoman] [LMMathRoman-Bold]
\definefontsynonym [MathExtension] [LMMathExtension-Regular]
\definefontsynonym [MathItalic] [LMMathItalic-BoldItalic]
\definefontsynonym [MathSymbol] [LMMathSymbols-BoldItalic]
\stoptypescript
\starttypescript [bfmath] [modern,computer-modern,latin-modern] [name]
- \definefontsynonym [MathRomanBold] [LMRoman-Bold]
+ \definefontsynonym [MathRomanBold] [LMMathRoman-Bold]
\definefontsynonym [MathExtension] [LMMathExtension-Regular]
\definefontsynonym [MathItalicBold] [LMMathItalic-BoldItalic]
\definefontsynonym [MathSymbolBold] [LMMathSymbols-BoldItalic]
@@ -250,6 +250,8 @@
\stoptypescript
\starttypescript [math] [modern,latin-modern]
+ \definefontsynonym [LMMathRoman-Regular] [LMMathRoman10-Regular]
+ \definefontsynonym [LMMathRoman-Bold] [LMMathRoman10-Bold]
\definefontsynonym [LMMathSymbols-BoldItalic] [LMMathSymbols10-BoldItalic]
\definefontsynonym [LMMathSymbols-Italic] [LMMathSymbols10-Italic]
\definefontsynonym [LMMathExtension-Regular] [LMMathExtension10-Regular]
@@ -257,6 +259,21 @@
\definefontsynonym [LMMathItalic-BoldItalic] [LMMathItalic10-BoldItalic]
\stoptypescript
+% can boldmath and bfmath be joined?
+\starttypescript [boldmath] [modern,computer-modern,latin-modern] [name]
+ \definefontsynonym [LMMathRoman-Bold] [LMMathRoman10-Bold]
+ \definefontsynonym [LMMathExtension-Regular] [LMMathExtension10-Regular]
+ \definefontsynonym [LMMathItalic-BoldItalic] [LMMathItalic10-BoldItalic]
+ \definefontsynonym [LMMathSymbols-BoldItalic] [LMMathSymbols10-BoldItalic]
+\stoptypescript
+
+\starttypescript [bfmath] [modern,computer-modern,latin-modern] [name]
+ \definefontsynonym [LMMathRoman-Bold] [LMMathRoman10-Bold]
+ \definefontsynonym [LMMathExtension-Regular] [LMMathExtension10-Regular]
+ \definefontsynonym [LMMathItalic-BoldItalic] [LMMathItalic10-BoldItalic]
+ \definefontsynonym [LMMathSymbols-BoldItalic] [LMMathSymbols10-BoldItalic]
+\stoptypescript
+
\starttypescript [serif] [modern,latin-modern,computer-modern]
\definefontsynonym [cmr5] [LMRoman5-Regular]
\definefontsynonym [cmr6] [LMRoman6-Regular]
diff --git a/tex/context/base/type-xtx.tex b/tex/context/base/type-xtx.tex
index be833bb2a..32ff858d1 100644
--- a/tex/context/base/type-xtx.tex
+++ b/tex/context/base/type-xtx.tex
@@ -12,7 +12,7 @@
%C details.
%D Here are some fonts definitions that can get you started with
-%D \XETEX (for more details see Adam's MyWay documents).
+%D \XETEX\ (for more details see Adam's MyWay documents).
%D
%D Most typescripts in this file are mostly independent of the other
%D typescript files. Generally, you can speed things up a lot by
@@ -36,7 +36,7 @@
%D
%D \starttyping
%D \definetypeface[basic][rm][Xserif][Baskerville]
-%D \definetypeface[basic][ss][Xsans] [Optima Regular][default][encoding=uc,rscale=.87]
+%D \definetypeface[basic][ss][Xsans] [Optima Regular][default][features=default,rscale=.87]
%D \definetypeface[basic][tt][Xmono] [Courier] [default]
%D \stoptyping
%D
@@ -108,10 +108,12 @@
%D HH: todo, define feature set switch mapping=tex-tex
-\definefontsynonym[Dummy] [name:\typescripttwo\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[DummyItalic] [name:\typescripttwo/I\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[DummyBold] [name:\typescripttwo/B\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[DummyBoldItalic][name:\typescripttwo/BI\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[Dummy] [name:\typescripttwo] [features=default]
+\definefontsynonym[DummyItalic] [name:\typescripttwo/I] [features=default]
+\definefontsynonym[DummyBold] [name:\typescripttwo/B] [features=default]
+\definefontsynonym[DummyBoldItalic][name:\typescripttwo/BI][features=default]
+
+\definefontsynonym[DummyCaps] [name:\typescripttwo] [features=smallcaps]
\definefontsynonym[Serif] [Dummy]
\definefontsynonym[SerifBold] [DummyBold]
@@ -119,16 +121,18 @@
\definefontsynonym[SerifBoldItalic] [DummyBoldItalic]
\definefontsynonym[SerifSlanted] [DummyItalic]
\definefontsynonym[SerifBoldSlanted][DummyBoldItalic]
-\definefontsynonym[SerifCaps] [Dummy]
+\definefontsynonym[SerifCaps] [DummyCaps]
\stoptypescript
\starttypescript[Xsans][all][name]
-\definefontsynonym[DummySans] [name:\typescripttwo\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[DummySansItalic] [name:\typescripttwo/I\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[DummySansBold] [name:\typescripttwo/B\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[DummySansBoldItalic][name:\typescripttwo/BI\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[DummySans] [name:\typescripttwo] [features=default]
+\definefontsynonym[DummySansItalic] [name:\typescripttwo/I] [features=default]
+\definefontsynonym[DummySansBold] [name:\typescripttwo/B] [features=default]
+\definefontsynonym[DummySansBoldItalic][name:\typescripttwo/BI][features=default]
+
+\definefontsynonym[DummySansCaps] [name:\typescripttwo] [features=smallcaps]
\definefontsynonym[Sans] [DummySans]
\definefontsynonym[SansBold] [DummySansBold]
@@ -136,16 +140,18 @@
\definefontsynonym[SansBoldItalic] [DummySansBoldItalic]
\definefontsynonym[SansSlanted] [DummySansItalic]
\definefontsynonym[SansBoldSlanted][DummySansBoldItalic]
-\definefontsynonym[SansCaps] [DummySans]
+\definefontsynonym[SansCaps] [DummySansCaps]
\stoptypescript
\starttypescript[Xmono][all][name]
-\definefontsynonym[DummyMono] [name:\typescripttwo] [encoding=uc]
-\definefontsynonym[DummyMonoItalic] [name:\typescripttwo/I] [encoding=uc]
-\definefontsynonym[DummyMonoBold] [name:\typescripttwo/B] [encoding=uc]
-\definefontsynonym[DummyMonoBoldItalic][name:\typescripttwo/BI][encoding=uc]
+\definefontsynonym[DummyMono] [name:\typescripttwo]
+\definefontsynonym[DummyMonoItalic] [name:\typescripttwo/I]
+\definefontsynonym[DummyMonoBold] [name:\typescripttwo/B]
+\definefontsynonym[DummyMonoBoldItalic][name:\typescripttwo/BI]
+
+% TODO: smallcaps without other features
\definefontsynonym[Mono] [DummyMono]
\definefontsynonym[MonoBold] [DummyMonoBold]
@@ -204,10 +210,10 @@
\starttypescript[serif][times][uc]
-\definefontsynonym[Times-Roman] [name:Times Roman\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Times-Italic] [name:Times Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Times-Bold] [name:Times Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Times-BoldItalic][name:Times Bold Italic\xetexcolon mapping=tex-text;][encoding=uc]
+\definefontsynonym[Times-Roman] [name:Times Roman] [features=default]
+\definefontsynonym[Times-Italic] [name:Times Italic] [features=default]
+\definefontsynonym[Times-Bold] [name:Times Bold] [features=default]
+\definefontsynonym[Times-BoldItalic][name:Times Bold Italic][features=default]
\stoptypescript
@@ -215,14 +221,14 @@
\starttypescript[serif][palatino][uc]
-\definefontsynonym[Palatino] [name:Book Antiqua\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Palatino-Italic] [name:Book Antiqua Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Palatino-Bold] [name:Book Antiqua Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Palatino-BoldItalic] [name:Book Antiqua Bold Italic\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[Palatino] [name:Book Antiqua] [features=default]
+\definefontsynonym[Palatino-Italic] [name:Book Antiqua Italic] [features=default]
+\definefontsynonym[Palatino-Bold] [name:Book Antiqua Bold] [features=default]
+\definefontsynonym[Palatino-BoldItalic] [name:Book Antiqua Bold Italic][features=default]
-\definefontsynonym[Palatino-Slanted] [Palatino-Italic]
-\definefontsynonym[Palatino-BoldSlanted][Palatino-BoldItalic]
-\definefontsynonym[Palatino-Caps] [Palatino]
+\definefontsynonym[Palatino-Slanted] [Palatino-Italic]
+\definefontsynonym[Palatino-BoldSlanted] [Palatino-BoldItalic]
+\definefontsynonym[Palatino-Caps] [Palatino]
\stoptypescript
@@ -231,10 +237,10 @@
\starttypescript[sans][helvetica][uc]
-\definefontsynonym[Helvetica] [name:Helvetica Neue\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Helvetica-Oblique] [name:Helvetica Neue Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Helvetica-Bold] [name:Helvetica Neue Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Helvetica-BoldOblique][name:Helvetica Neue Bold Italic\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[Helvetica] [name:Helvetica Neue] [features=default]
+\definefontsynonym[Helvetica-Oblique] [name:Helvetica Neue Italic] [features=default]
+\definefontsynonym[Helvetica-Bold] [name:Helvetica Neue Bold] [features=default]
+\definefontsynonym[Helvetica-BoldOblique][name:Helvetica Neue Bold Italic][features=default]
\stoptypescript
@@ -244,9 +250,9 @@
\starttypescript[mono][courier][uc]
-\definefontsynonym[Courier] [name:Courier\xetexcolon mapping=tex-text] [encoding=uc]
+\definefontsynonym[Courier] [name:Courier]
\definefontsynonym[Courier-Oblique] [Courier]
-\definefontsynonym[Courier-Bold] [name:Courier Bold\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[Courier-Bold] [name:Courier Bold]
\definefontsynonym[Courier-BoldOblique][Courier-Bold]
\stoptypescript
@@ -284,8 +290,8 @@
\starttypescript[sans][lucidagrande][uc]
-\definefontsynonym[LucidaGrande] [name:Lucida Grande\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[LucidaGrandeBold][name:Lucida Grande Bold\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[LucidaGrande] [name:Lucida Grande] [features=default]
+\definefontsynonym[LucidaGrandeBold][name:Lucida Grande Bold][features=default]
\stoptypescript
@@ -302,11 +308,11 @@
\stoptypescript
\starttypescript[sans][optima][uc]
-\definefontsynonym[Optima] [name:Optima Regular\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[OptimaItalic] [name:Optima Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[OptimaBold] [name:Optima Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[OptimaBoldItalic][name:Optima Bold Italic\xetexcolon mapping=tex-text][encoding=uc]
-\definefontsynonym[OptimaBlack] [name:Optima ExtraBlack\xetexcolon mapping=tex-text] [encoding=uc]
+\definefontsynonym[Optima] [name:Optima Regular] [features=default]
+\definefontsynonym[OptimaItalic] [name:Optima Italic] [features=default]
+\definefontsynonym[OptimaBold] [name:Optima Bold] [features=default]
+\definefontsynonym[OptimaBoldItalic][name:Optima Bold Italic][features=default]
+\definefontsynonym[OptimaBlack] [name:Optima ExtraBlack] [features=default]
\stoptypescript
\starttypescript[sans][optima][name]
@@ -323,12 +329,12 @@
\starttypescript[sans][gillsans,gillsanslt][uc]
-\definefontsynonym[GillSans] [name:Gill Sans\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[GillSansItalic] [name:Gill Sans Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[GillSansBold] [name:Gill Sans Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[GillSansBoldItalic] [name:Gill Sans Bold Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[GillSansLight] [name:Gill Sans Light\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[GillSansLightItalic][name:Gill Sans Light Italic\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[GillSans] [name:Gill Sans] [features=default]
+\definefontsynonym[GillSansItalic] [name:Gill Sans Italic] [features=default]
+\definefontsynonym[GillSansBold] [name:Gill Sans Bold] [features=default]
+\definefontsynonym[GillSansBoldItalic] [name:Gill Sans Bold Italic] [features=default]
+\definefontsynonym[GillSansLight] [name:Gill Sans Light] [features=default]
+\definefontsynonym[GillSansLightItalic][name:Gill Sans Light Italic][features=default]
\stoptypescript
@@ -430,10 +436,10 @@
\starttypescript[serif][timesnewroman][uc]
-\definefontsynonym[MSTimes] [name:Times New Roman\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[MSTimesItalic] [name:Times New Roman Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[MSTimesBold] [name:Times New Roman Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[MSTimesBoldItalic][name:Times New Roman Bold Italic\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[MSTimes] [name:Times New Roman] [features=default]
+\definefontsynonym[MSTimesItalic] [name:Times New Roman Italic] [features=default]
+\definefontsynonym[MSTimesBold] [name:Times New Roman Bold] [features=default]
+\definefontsynonym[MSTimesBoldItalic][name:Times New Roman Bold Italic][features=default]
\stoptypescript
@@ -451,10 +457,10 @@
\starttypescript[sans][arial][uc]
-\definefontsynonym[Arial] [name:Arial\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[ArialItalic] [name:Arial Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[ArialBold] [name:Arial Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[ArialBoldItalic][name:Arial Bold Italic\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[Arial] [name:Arial] [features=default]
+\definefontsynonym[ArialItalic] [name:Arial Italic] [features=default]
+\definefontsynonym[ArialBold] [name:Arial Bold] [features=default]
+\definefontsynonym[ArialBoldItalic][name:Arial Bold Italic][features=default]
\stoptypescript
@@ -476,10 +482,10 @@
\starttypescript [serif] [lucida] [uc]
- \definefontsynonym [LucidaBright] [name:Lucida Bright\xetexcolon mapping=tex-text] [encoding=uc]
- \definefontsynonym [LucidaBright-Demi] [name:Lucida Bright Demibold\xetexcolon mapping=tex-text][encoding=uc]
- \definefontsynonym [LucidaBright-DemiItalic] [name:Lucida Bright Demibold\xetexcolon mapping=tex-text][encoding=uc]
- \definefontsynonym [LucidaBright-Italic] [name:Lucida Bright\xetexcolon mapping=tex-text] [encoding=uc]
+ \definefontsynonym [LucidaBright] [name:Lucida Bright] [features=default]
+ \definefontsynonym [LucidaBright-Demi] [name:Lucida Bright Demibold][features=default]
+ \definefontsynonym [LucidaBright-DemiItalic] [name:Lucida Bright Demibold][features=default]
+ \definefontsynonym [LucidaBright-Italic] [name:Lucida Bright] [features=default]
\definefontsynonym [LucidaBrightSmallcaps] [LucidaBright]
\definefontsynonym [LucidaBrightSmallcaps-Demi][LucidaBright-Demi]
@@ -488,10 +494,10 @@
\stoptypescript
\starttypescript [sans] [lucida] [uc]
- \definefontsynonym [LucidaSans] [name:Lucida Sans Regular\xetexcolon mapping=tex-text] [encoding=uc]
- \definefontsynonym [LucidaSans-Demi] [name:Lucida Sans Demibold Roman\xetexcolon mapping=tex-text] [encoding=uc]
- \definefontsynonym [LucidaSans-DemiItalic][name:Lucida Sans Demibold Italic\xetexcolon mapping=tex-text][encoding=uc]
- \definefontsynonym [LucidaSans-Italic] [name:Lucida Sans Italic\xetexcolon mapping=tex-text] [encoding=uc]
+ \definefontsynonym [LucidaSans] [name:Lucida Sans Regular] [features=default]
+ \definefontsynonym [LucidaSans-Demi] [name:Lucida Sans Demibold Roman] [features=default]
+ \definefontsynonym [LucidaSans-DemiItalic][name:Lucida Sans Demibold Italic][features=default]
+ \definefontsynonym [LucidaSans-Italic] [name:Lucida Sans Italic] [features=default]
\definefontsynonym [LucidaSans-Bold] [LucidaSans-Demi]
\definefontsynonym [LucidaSans-BoldItalic][LucidaSans-DemiItalic]
@@ -509,7 +515,7 @@
\starttypescript [calligraphy] [lucida] [uc]
- \definefontsynonym[LucidaCalligraphy-Italic][name:Lucida Calligraphy Italic\xetexcolon mapping=tex-text][encoding=uc]
+ \definefontsynonym[LucidaCalligraphy-Italic][name:Lucida Calligraphy Italic][features=default]
\stoptypescript
@@ -517,16 +523,16 @@
\starttypescript[handwriting][lucida][uc]
- \definefontsynonym[LucidaHandwriting-Italic][name:Lucida Handwriting Italic\xetexcolon mapping=tex-text][encoding=uc]
+ \definefontsynonym[LucidaHandwriting-Italic][name:Lucida Handwriting Italic][features=default]
\stoptypescript
\starttypescript[fax][lucida][uc]
- \definefontsynonym[LucidaFax] [name:Lucida Fax Regular\xetexcolon mapping=tex-text] [encoding=uc]
- \definefontsynonym[LucidaFax-Demi] [name:Lucida Fax Demibold\xetexcolon mapping=tex-text] [encoding=uc]
- \definefontsynonym[LucidaFax-DemiItalic][name:Lucida Fax Demibold Italic\xetexcolon mapping=tex-text][encoding=uc]
- \definefontsynonym[LucidaFax-Italic] [name:Lucida Fax Italic\xetexcolon mapping=tex-text] [encoding=uc]
+ \definefontsynonym[LucidaFax] [name:Lucida Fax Regular] [features=default]
+ \definefontsynonym[LucidaFax-Demi] [name:Lucida Fax Demibold] [features=default]
+ \definefontsynonym[LucidaFax-DemiItalic][name:Lucida Fax Demibold Italic][features=default]
+ \definefontsynonym[LucidaFax-Italic] [name:Lucida Fax Italic] [features=default]
\stoptypescript
@@ -536,8 +542,8 @@
\starttypescript[serif][gentium][uc]
-\definefontsynonym[Gentium] [name:Gentium\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[GentiumItalic][name:Gentium Italic\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[Gentium] [name:Gentium] [features=default]
+\definefontsynonym[GentiumItalic][name:Gentium Italic][features=default]
\stoptypescript
diff --git a/tex/context/base/unic-ini.tex b/tex/context/base/unic-ini.tex
index f0f219182..1b2af197b 100644
--- a/tex/context/base/unic-ini.tex
+++ b/tex/context/base/unic-ini.tex
@@ -233,16 +233,13 @@
% beware, this may change: #1 rawchar (=> `#1 and such, saves tokens)
\def\utftwouniglph#1#2%
- {\@EA\unicodechar\@EA{\the\numexpr\utf@a*(#1-\utf@d)+%
- `#2-\utf@g\relax}}
+ {\@EA\unicodechar\@EA{\the\numexpr\utf@a*(#1-\utf@d)+`#2-\utf@g\relax}}
\def\utfthreeuniglph#1#2#3%
- {\@EA\unicodechar\@EA{\the\numexpr\utf@b*(#1-\utf@e)+%
- \utf@a*(`#2-\utf@g)+`#3-\utf@g\relax}}
+ {\@EA\unicodechar\@EA{\the\numexpr\utf@b*(#1-\utf@e)+\utf@a*(`#2-\utf@g)+`#3-\utf@g\relax}}
\def\utffouruniglph#1#2#3#4%
- {\@EA\unicodechar\@EA{\the\numexpr\utf@c*(#1-\utf@f)+%
- \utf@b*(`#2-\utf@g)+\utf@a*(`#3-\utf@g)+`#4-\utf@g\relax}}
+ {\@EA\unicodechar\@EA{\the\numexpr\utf@c*(#1-\utf@f)+\utf@b*(`#2-\utf@g)+\utf@a*(`#3-\utf@g)+`#4-\utf@g\relax}}
% \def\keeputfcharacters
% {\def\utftwouniglph ##1##2{\rawcharacter{##1}\string##2}%
@@ -749,9 +746,7 @@
\endXETEX
\beginTEX
-
\def\numbertoutf#1{[\number#1]}
-
\endTEX
\def\uchartoutf#1#2%
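
The simplified \utftwouniglph family above reduces to plain UTF-8 arithmetic. Assuming the usual constants (\utf@a=64, \utf@b=4096, \utf@c=262144, \utf@d=192, \utf@e=224, \utf@f=240, \utf@g=128; they are not shown in this hunk), the computation can be checked in a few lines of Lua:

    local function utf2(b1, b2)
        return 64*(b1 - 192) + (b2 - 128)
    end

    local function utf3(b1, b2, b3)
        return 4096*(b1 - 224) + 64*(b2 - 128) + (b3 - 128)
    end

    print(utf2(0xC3, 0xA9))       --> 233, i.e. U+00E9, é
    print(utf3(0xE2, 0x82, 0xAC)) --> 8364, i.e. U+20AC, €
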
diff --git a/tex/context/base/x-cml.mkiv b/tex/context/base/x-cml.mkiv
index 12c4600f0..372165092 100644
--- a/tex/context/base/x-cml.mkiv
+++ b/tex/context/base/x-cml.mkiv
@@ -20,16 +20,10 @@
\unprotect
\startxmlsetups xml:cml:process
- \xmlstrip {\xmldocument} {cml:chem|cml:ichem|cml:dchem}
- \xmlstrip {\xmldocument} {cml:reaction}
- \xmlstrip {\xmldocument} {cml:molecule}
- \xmlstrip {\xmldocument} {cml:ion}
- \xmlstrip {\xmldocument} {cml:structure}
- \xmlgrab {\xmldocument} {cml:chem|cml:dchem|cml:ichem|cml:reaction|cml:molecule|cml:ion|cml:atom} {*}
- \xmlgrab {\xmldocument} {cml:structure|cml:component|cml:forever} {*}
-% \xmlgrab {\xmldocument} {cml:*} {*}
+ \xmlstrip {\xmldocument} {cml:chem|cml:ichem|cml:dchem|cml:reaction|cml:molecule|cml:ion|cml:structure}
+ \xmlgrab {\xmldocument} {cml:*} {*}
\xmlgrab {\xmldocument} {cml:gives|cml:equilibrium|cml:mesomeric} {cml:arrow}
\xmlgrab {\xmldocument} {cml:plus|cml:minus|cml:equal} {cml:operator}
\xmlgrab {\xmldocument} {cml:bond|cml:singlebond|cml:doublebound|cml:triplebond} {cml:bond}
diff --git a/tex/context/base/x-fo.tex b/tex/context/base/x-fo.tex
index 1ce18e509..9c09fe0db 100644
--- a/tex/context/base/x-fo.tex
+++ b/tex/context/base/x-fo.tex
@@ -2470,7 +2470,7 @@ leader-pattern-width=12pt,
\startsetups fo:position:absolute:stop
\egroup
  % possibly postpone until the otr, so that text/realfolio is solved
- \edef\FOpartag{p:\parposcounter}
+ \edef\FOpartag{p:\number\parposcounter}
\edef\FOtxttag{text:\realfolio}
\FOcontainerWW\MPplus\FOpartag{1}{0pt}
\FOcontainerHH\zeropoint % todo: add anchors to each 'object'
diff --git a/tex/context/base/x-mml.mkiv b/tex/context/base/x-mml.mkiv
index a0731b45d..8cbf07f08 100644
--- a/tex/context/base/x-mml.mkiv
+++ b/tex/context/base/x-mml.mkiv
@@ -26,8 +26,9 @@
% \xmlregistersetup{xml:mml:process}
+
\startxmlsetups xml:mml:process
- \xmlutfize {\xmldocument}
+% \xmlutfize {\xmldocument}
\xmlgrab {\xmldocument} {mml:*} {*}
\stopxmlsetups
diff --git a/tex/context/base/x-newmml.tex b/tex/context/base/x-newmml.tex
index cd44914d4..0d17aa4a5 100644
--- a/tex/context/base/x-newmml.tex
+++ b/tex/context/base/x-newmml.tex
@@ -548,7 +548,7 @@
\def\MMLccartesian#1%
{\def\MMLcsep{+}#1\getXMLentity{imaginaryi}}
-% float will be obsolete, an is replace by e-notation
+% float will be obsolete, and is replaced by e-notation
\def\MMLcfloat#1%
{\doifelse\@@MMLfloatsymbol\v!no
diff --git a/tex/context/base/xtag-exp.tex b/tex/context/base/xtag-exp.tex
index 7f289dbf5..14c3c7a71 100644
--- a/tex/context/base/xtag-exp.tex
+++ b/tex/context/base/xtag-exp.tex
@@ -27,8 +27,7 @@
\def\startXMLreading
{\increment \XMLfilenesting \relax
- \ifnum\XMLfilenesting=1
-%\ifcase\XMLfilenesting\or
+ \ifcase\XMLfilenesting\or
\ifprocessingXML
\let\restoreXMLelements\relax
\else
@@ -38,8 +37,7 @@
\fi}
\def\stopXMLreading
- {\ifnum\XMLfilenesting=1
-%\ifcase\XMLfilenesting\or
+ {\ifcase\XMLfilenesting\or
\restoreXMLelements
\let\restoreXMLelements\relax
\fi
diff --git a/tex/context/interface/cont-cz.xml b/tex/context/interface/cont-cz.xml
index 11174cbbe..c82e72d0c 100644
--- a/tex/context/interface/cont-cz.xml
+++ b/tex/context/interface/cont-cz.xml
@@ -18,7 +18,10 @@
<cd:constant type="vnejsi"/>
<cd:constant type="vlevo"/>
<cd:constant type="vpravo"/>
+ <cd:constant type="flushleft"/>
+ <cd:constant type="flushright"/>
<cd:constant type="nastred"/>
+ <cd:constant type="center"/>
<cd:constant type="normalni"/>
<cd:constant type="ne"/>
<cd:constant type="ano"/>
@@ -4224,6 +4227,7 @@
<cd:parameter name="cislo">
<cd:constant type="ano"/>
<cd:constant type="ne"/>
+ <cd:constant type="zadny"/>
</cd:parameter>
<cd:parameter name="mezi">
<cd:constant type="cd:command"/>
@@ -6065,6 +6069,9 @@
<cd:parameter name="zarovnani">
<cd:resolve name="align"/>
</cd:parameter>
+ <cd:parameter name="symzarovnani">
+ <cd:resolve name="symalign"/>
+ </cd:parameter>
<cd:parameter name="odsadpristi">
<cd:resolve name="indentnext"/>
</cd:parameter>
diff --git a/tex/context/interface/cont-de.xml b/tex/context/interface/cont-de.xml
index 5cae23ebe..3b2638f60 100644
--- a/tex/context/interface/cont-de.xml
+++ b/tex/context/interface/cont-de.xml
@@ -18,7 +18,10 @@
<cd:constant type="aussen"/>
<cd:constant type="links"/>
<cd:constant type="rechts"/>
+ <cd:constant type="flushleft"/>
+ <cd:constant type="flushright"/>
<cd:constant type="mittig"/>
+ <cd:constant type="center"/>
<cd:constant type="normal"/>
<cd:constant type="nein"/>
<cd:constant type="ja"/>
@@ -4224,6 +4227,7 @@
<cd:parameter name="nummer">
<cd:constant type="ja"/>
<cd:constant type="nein"/>
+ <cd:constant type="kein"/>
</cd:parameter>
<cd:parameter name="zwischen">
<cd:constant type="cd:command"/>
@@ -6065,6 +6069,9 @@
<cd:parameter name="ausrichtung">
<cd:resolve name="align"/>
</cd:parameter>
+ <cd:parameter name="symausrichtung">
+ <cd:resolve name="symalign"/>
+ </cd:parameter>
<cd:parameter name="ziehefolgendeein">
<cd:resolve name="indentnext"/>
</cd:parameter>
diff --git a/tex/context/interface/cont-en.xml b/tex/context/interface/cont-en.xml
index 9ff000b2c..69884654e 100644
--- a/tex/context/interface/cont-en.xml
+++ b/tex/context/interface/cont-en.xml
@@ -18,7 +18,10 @@
<cd:constant type="outer"/>
<cd:constant type="left"/>
<cd:constant type="right"/>
+ <cd:constant type="flushleft"/>
+ <cd:constant type="flushright"/>
<cd:constant type="middle"/>
+ <cd:constant type="center"/>
<cd:constant type="normal"/>
<cd:constant type="no"/>
<cd:constant type="yes"/>
@@ -4224,6 +4227,7 @@
<cd:parameter name="number">
<cd:constant type="yes"/>
<cd:constant type="no"/>
+ <cd:constant type="none"/>
</cd:parameter>
<cd:parameter name="inbetween">
<cd:constant type="cd:command"/>
@@ -6065,6 +6069,9 @@
<cd:parameter name="align">
<cd:resolve name="align"/>
</cd:parameter>
+ <cd:parameter name="symalign">
+ <cd:resolve name="symalign"/>
+ </cd:parameter>
<cd:parameter name="indentnext">
<cd:resolve name="indentnext"/>
</cd:parameter>
diff --git a/tex/context/interface/cont-fr.xml b/tex/context/interface/cont-fr.xml
index aed3c6efb..b74b60e4f 100644
--- a/tex/context/interface/cont-fr.xml
+++ b/tex/context/interface/cont-fr.xml
@@ -18,7 +18,10 @@
<cd:constant type="exterieur"/>
<cd:constant type="gauche"/>
<cd:constant type="droite"/>
+ <cd:constant type="flushleft"/>
+ <cd:constant type="flushright"/>
<cd:constant type="milieu"/>
+ <cd:constant type="center"/>
<cd:constant type="normal"/>
<cd:constant type="non"/>
<cd:constant type="oui"/>
@@ -4224,6 +4227,7 @@
<cd:parameter name="numero">
<cd:constant type="oui"/>
<cd:constant type="non"/>
+ <cd:constant type="rien"/>
</cd:parameter>
<cd:parameter name="entre">
<cd:constant type="cd:command"/>
@@ -6065,6 +6069,9 @@
<cd:parameter name="alignement">
<cd:resolve name="align"/>
</cd:parameter>
+ <cd:parameter name="symalignement">
+ <cd:resolve name="symalign"/>
+ </cd:parameter>
<cd:parameter name="indentesuivant">
<cd:resolve name="indentnext"/>
</cd:parameter>
diff --git a/tex/context/interface/cont-it.xml b/tex/context/interface/cont-it.xml
index 0b5c91052..fbdb3acd2 100644
--- a/tex/context/interface/cont-it.xml
+++ b/tex/context/interface/cont-it.xml
@@ -18,7 +18,10 @@
<cd:constant type="esterno"/>
<cd:constant type="sinistra"/>
<cd:constant type="destra"/>
+ <cd:constant type="flushleft"/>
+ <cd:constant type="flushright"/>
<cd:constant type="centro"/>
+ <cd:constant type="center"/>
<cd:constant type="normale"/>
<cd:constant type="no"/>
<cd:constant type="si"/>
@@ -4224,6 +4227,7 @@
<cd:parameter name="numero">
<cd:constant type="si"/>
<cd:constant type="no"/>
+ <cd:constant type="nessuno"/>
</cd:parameter>
<cd:parameter name="tra">
<cd:constant type="cd:command"/>
@@ -6065,6 +6069,9 @@
<cd:parameter name="allinea">
<cd:resolve name="align"/>
</cd:parameter>
+ <cd:parameter name="symallinea">
+ <cd:resolve name="symalign"/>
+ </cd:parameter>
<cd:parameter name="rientrasuccessivo">
<cd:resolve name="indentnext"/>
</cd:parameter>
diff --git a/tex/context/interface/cont-nl.xml b/tex/context/interface/cont-nl.xml
index 02399e877..84d9a4b17 100644
--- a/tex/context/interface/cont-nl.xml
+++ b/tex/context/interface/cont-nl.xml
@@ -18,7 +18,10 @@
<cd:constant type="buiten"/>
<cd:constant type="links"/>
<cd:constant type="rechts"/>
+ <cd:constant type="lijnlinks"/>
+ <cd:constant type="lijnrechts"/>
<cd:constant type="midden"/>
+ <cd:constant type="centreer"/>
<cd:constant type="normaal"/>
<cd:constant type="nee"/>
<cd:constant type="ja"/>
@@ -4224,6 +4227,7 @@
<cd:parameter name="nummer">
<cd:constant type="ja"/>
<cd:constant type="nee"/>
+ <cd:constant type="geen"/>
</cd:parameter>
<cd:parameter name="tussen">
<cd:constant type="cd:command"/>
@@ -6065,6 +6069,9 @@
<cd:parameter name="uitlijnen">
<cd:resolve name="align"/>
</cd:parameter>
+ <cd:parameter name="symuitlijnen">
+ <cd:resolve name="symalign"/>
+ </cd:parameter>
<cd:parameter name="springvolgendein">
<cd:resolve name="indentnext"/>
</cd:parameter>
diff --git a/tex/context/interface/cont-ro.xml b/tex/context/interface/cont-ro.xml
index 0f34ae02a..0d2b5af05 100644
--- a/tex/context/interface/cont-ro.xml
+++ b/tex/context/interface/cont-ro.xml
@@ -18,7 +18,10 @@
<cd:constant type="extern"/>
<cd:constant type="stanga"/>
<cd:constant type="dreapta"/>
+ <cd:constant type="flushleft"/>
+ <cd:constant type="flushright"/>
<cd:constant type="centru"/>
+ <cd:constant type="center"/>
<cd:constant type="normal"/>
<cd:constant type="nu"/>
<cd:constant type="da"/>
@@ -4224,6 +4227,7 @@
<cd:parameter name="numar">
<cd:constant type="da"/>
<cd:constant type="nu"/>
+ <cd:constant type="niciunul"/>
</cd:parameter>
<cd:parameter name="intre">
<cd:constant type="cd:command"/>
@@ -6065,6 +6069,9 @@
<cd:parameter name="aliniere">
<cd:resolve name="align"/>
</cd:parameter>
+ <cd:parameter name="symaliniere">
+ <cd:resolve name="symalign"/>
+ </cd:parameter>
<cd:parameter name="aliniaturmator">
<cd:resolve name="indentnext"/>
</cd:parameter>
diff --git a/tex/context/interface/keys-cz.xml b/tex/context/interface/keys-cz.xml
index 068d08fec..077d6f2b4 100644
--- a/tex/context/interface/keys-cz.xml
+++ b/tex/context/interface/keys-cz.xml
@@ -1,6 +1,6 @@
<?xml version="1.0"?>
-<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="cz" version="2007.09.28 11:58">
+<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="cz" version="2007.12.05 13:56">
<cd:variables>
<cd:variable name="lesshyphenation" value="lesshyphenation"/>
@@ -125,6 +125,7 @@
<cd:variable name="lines" value="radky"/>
<cd:variable name="framedtext" value="oramovanytext"/>
<cd:variable name="quotation" value="citace"/>
+ <cd:variable name="blockquote" value="blockquote"/>
<cd:variable name="quote" value="citovat"/>
<cd:variable name="speech" value="speech"/>
<cd:variable name="itemize" value="vycet"/>
@@ -446,6 +447,7 @@
<cd:constant name="inbetween" value="mezi"/>
<cd:constant name="type" value="typ"/>
<cd:constant name="align" value="zarovnani"/>
+ <cd:constant name="symalign" value="symzarovnani"/>
<cd:constant name="urlspace" value="prostorurl"/>
<cd:constant name="urlalternative" value="urlalternativa"/>
<cd:constant name="from" value="z"/>
diff --git a/tex/context/interface/keys-de.xml b/tex/context/interface/keys-de.xml
index a7488a838..6a02edfe4 100644
--- a/tex/context/interface/keys-de.xml
+++ b/tex/context/interface/keys-de.xml
@@ -1,6 +1,6 @@
<?xml version="1.0"?>
-<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="de" version="2007.09.28 11:58">
+<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="de" version="2007.12.05 13:56">
<cd:variables>
<cd:variable name="lesshyphenation" value="lesshyphenation"/>
@@ -125,6 +125,7 @@
<cd:variable name="lines" value="zeilen"/>
<cd:variable name="framedtext" value="umrahmtertext"/>
<cd:variable name="quotation" value="zitat"/>
+ <cd:variable name="blockquote" value="blockquote"/>
<cd:variable name="quote" value="zitieren"/>
<cd:variable name="speech" value="speech"/>
<cd:variable name="itemize" value="aufzaehlung"/>
@@ -446,6 +447,7 @@
<cd:constant name="inbetween" value="zwischen"/>
<cd:constant name="type" value="typ"/>
<cd:constant name="align" value="ausrichtung"/>
+ <cd:constant name="symalign" value="symausrichtung"/>
<cd:constant name="urlspace" value="urlspatium"/>
<cd:constant name="urlalternative" value="urlalternative"/>
<cd:constant name="from" value="von"/>
diff --git a/tex/context/interface/keys-en.xml b/tex/context/interface/keys-en.xml
index a9a2d6262..2bc21ab38 100644
--- a/tex/context/interface/keys-en.xml
+++ b/tex/context/interface/keys-en.xml
@@ -1,6 +1,6 @@
<?xml version="1.0"?>
-<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="en" version="2007.09.28 11:58">
+<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="en" version="2007.12.05 13:56">
<cd:variables>
<cd:variable name="lesshyphenation" value="lesshyphenation"/>
@@ -125,6 +125,7 @@
<cd:variable name="lines" value="lines"/>
<cd:variable name="framedtext" value="framedtext"/>
<cd:variable name="quotation" value="quotation"/>
+ <cd:variable name="blockquote" value="blockquote"/>
<cd:variable name="quote" value="quote"/>
<cd:variable name="speech" value="speech"/>
<cd:variable name="itemize" value="itemize"/>
@@ -446,6 +447,7 @@
<cd:constant name="inbetween" value="inbetween"/>
<cd:constant name="type" value="type"/>
<cd:constant name="align" value="align"/>
+ <cd:constant name="symalign" value="symalign"/>
<cd:constant name="urlspace" value="urlspace"/>
<cd:constant name="urlalternative" value="urlalternative"/>
<cd:constant name="from" value="from"/>
diff --git a/tex/context/interface/keys-fr.xml b/tex/context/interface/keys-fr.xml
index cbbcd926c..dee31e9e3 100644
--- a/tex/context/interface/keys-fr.xml
+++ b/tex/context/interface/keys-fr.xml
@@ -1,6 +1,6 @@
<?xml version="1.0"?>
-<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="fr" version="2007.09.28 11:58">
+<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="fr" version="2007.12.05 13:56">
<cd:variables>
<cd:variable name="lesshyphenation" value="lesshyphenation"/>
@@ -125,6 +125,7 @@
<cd:variable name="lines" value="lignes"/>
<cd:variable name="framedtext" value="texteencadre"/>
<cd:variable name="quotation" value="citation"/>
+ <cd:variable name="blockquote" value="blockquote"/>
<cd:variable name="quote" value="citer"/>
<cd:variable name="speech" value="discours"/>
<cd:variable name="itemize" value="lister"/>
@@ -446,6 +447,7 @@
<cd:constant name="inbetween" value="entre"/>
<cd:constant name="type" value="type"/>
<cd:constant name="align" value="alignement"/>
+ <cd:constant name="symalign" value="symalignement"/>
<cd:constant name="urlspace" value="espaceurl"/>
<cd:constant name="urlalternative" value="alternativeurl"/>
<cd:constant name="from" value="de"/>
diff --git a/tex/context/interface/keys-it.xml b/tex/context/interface/keys-it.xml
index d2061a6c7..bf14c9594 100644
--- a/tex/context/interface/keys-it.xml
+++ b/tex/context/interface/keys-it.xml
@@ -1,6 +1,6 @@
<?xml version="1.0"?>
-<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="it" version="2007.09.28 11:58">
+<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="it" version="2007.12.05 13:56">
<cd:variables>
<cd:variable name="lesshyphenation" value="lesshyphenation"/>
@@ -125,6 +125,7 @@
<cd:variable name="lines" value="righe"/>
<cd:variable name="framedtext" value="testoincorniciato"/>
<cd:variable name="quotation" value="citazione"/>
+ <cd:variable name="blockquote" value="blockquote"/>
<cd:variable name="quote" value="menzione"/>
<cd:variable name="speech" value="speech"/>
<cd:variable name="itemize" value="elenco"/>
@@ -446,6 +447,7 @@
<cd:constant name="inbetween" value="tra"/>
<cd:constant name="type" value="type"/>
<cd:constant name="align" value="allinea"/>
+ <cd:constant name="symalign" value="symallinea"/>
<cd:constant name="urlspace" value="spaziourl"/>
<cd:constant name="urlalternative" value="alternativaurl"/>
<cd:constant name="from" value="da"/>
diff --git a/tex/context/interface/keys-nl.xml b/tex/context/interface/keys-nl.xml
index d38260a5e..859ee5984 100644
--- a/tex/context/interface/keys-nl.xml
+++ b/tex/context/interface/keys-nl.xml
@@ -1,6 +1,6 @@
<?xml version="1.0"?>
-<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="nl" version="2007.09.28 11:58">
+<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="nl" version="2007.12.05 13:56">
<cd:variables>
<cd:variable name="lesshyphenation" value="lesshyphenation"/>
@@ -125,6 +125,7 @@
<cd:variable name="lines" value="regels"/>
<cd:variable name="framedtext" value="kadertekst"/>
<cd:variable name="quotation" value="citaat"/>
+ <cd:variable name="blockquote" value="blokcitaat"/>
<cd:variable name="quote" value="citeer"/>
<cd:variable name="speech" value="spraak"/>
<cd:variable name="itemize" value="opsomming"/>
@@ -446,6 +447,7 @@
<cd:constant name="inbetween" value="tussen"/>
<cd:constant name="type" value="type"/>
<cd:constant name="align" value="uitlijnen"/>
+ <cd:constant name="symalign" value="symuitlijnen"/>
<cd:constant name="urlspace" value="urlspatie"/>
<cd:constant name="urlalternative" value="urlvariant"/>
<cd:constant name="from" value="van"/>
diff --git a/tex/context/interface/keys-ro.xml b/tex/context/interface/keys-ro.xml
index d08f55ab3..6c8083631 100644
--- a/tex/context/interface/keys-ro.xml
+++ b/tex/context/interface/keys-ro.xml
@@ -1,6 +1,6 @@
<?xml version="1.0"?>
-<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="ro" version="2007.09.28 11:58">
+<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="context" language="ro" version="2007.12.05 13:56">
<cd:variables>
<cd:variable name="lesshyphenation" value="lesshyphenation"/>
@@ -125,6 +125,7 @@
<cd:variable name="lines" value="linii"/>
<cd:variable name="framedtext" value="textinconjurat"/>
<cd:variable name="quotation" value="citat"/>
+ <cd:variable name="blockquote" value="blockquote"/>
<cd:variable name="quote" value="minicitat"/>
<cd:variable name="speech" value="speech"/>
<cd:variable name="itemize" value="enumerare"/>
@@ -446,6 +447,7 @@
<cd:constant name="inbetween" value="intre"/>
<cd:constant name="type" value="type"/>
<cd:constant name="align" value="aliniere"/>
+ <cd:constant name="symalign" value="symaliniere"/>
<cd:constant name="urlspace" value="spatiuurl"/>
<cd:constant name="urlalternative" value="urlalternativ"/>
<cd:constant name="from" value="dela"/>
diff --git a/tex/context/sample/sample.tex b/tex/context/sample/sample.tex
index a173072aa..3a433183e 100644
--- a/tex/context/sample/sample.tex
+++ b/tex/context/sample/sample.tex
@@ -36,6 +36,8 @@ used in testing bibliographic references and citations.
\NC linden.tex \NC Eugene Linden \NC The Winds of Change, Climate, Weather, and the
Destruction of Civilizations, \endgraf
Simon \& Schuster, 2006, p.106 \NC \NR
+\NC weisman.tex \NC Alan Weisman \NC The World Without Us, \endgraf
+ Thomas Dunne Books, 2007, p.160 \NC \NR
\stoptabulate
% Tufte: This quote will always produce hyphenated text, apart from the content,
@@ -52,4 +54,7 @@ used in testing bibliographic references and citations.
% The Universe in a Nutshell: a beautifully designed book (companion of A Short History
% of Time)
+% The World Without Us: A properly typeset, very readable book. Read it and you'll look at
+% the world around you differently (and a bit more frightened).
+
\stoptext
diff --git a/tex/context/sample/weisman.tex b/tex/context/sample/weisman.tex
new file mode 100644
index 000000000..7526d407a
--- /dev/null
+++ b/tex/context/sample/weisman.tex
@@ -0,0 +1,5 @@
+Since the mid-1990s, humans have taken an unprecedented step in Earthly
+annals by introducing not just exotic flora or fauna from one ecosystem
+into another, but actually inserting exotic genes into the operating
+systems of individual plants and animals, where they're intended to do
+exactly the same thing: copy themselves, over and over.
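Like the existing sample files (tufte.tex, knuth.tex, linden.tex), the new weisman.tex quotation is meant to be pulled into test documents by name. A minimal test-file sketch, with illustrative setup values only:

% sketch: typeset the new sample a few times to check paragraph breaking
\setupbodyfont[10pt]
\starttext
\dorecurse{3}{\input weisman \par}
\stoptext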
diff --git a/tex/context/test/x-cml-test.xml b/tex/context/test/x-cml-test.xml
index a177e5e2a..b7a3893de 100644
--- a/tex/context/test/x-cml-test.xml
+++ b/tex/context/test/x-cml-test.xml
@@ -1,6 +1,13 @@
<root>
<cml:dchem>
+ <cml:molecule>
+ <cml:atom n="2">H</cml:atom>
+ <cml:atom>O</cml:atom>
+ </cml:molecule>
+ </cml:dchem>
+
+ <cml:dchem>
<cml:reaction>
<cml:molecule n="2">
<cml:atom n="2"> H </cml:atom>
diff --git a/tex/generic/context/mptopdf.tex b/tex/generic/context/mptopdf.tex
index 84a8aa380..bb9f27d0c 100644
--- a/tex/generic/context/mptopdf.tex
+++ b/tex/generic/context/mptopdf.tex
@@ -112,6 +112,7 @@
\def\processMPfile#1 %
{\pdfoutput=1
+ \pdfpkresolution600
\pdfcompresslevel=9
\chardef\makeMPintoPDFobject=1
\hsize=100in
@@ -143,4 +144,32 @@
%D file can be converted to \EPS\ using for instance the
%D \PDFTOPS\ program (in \WEBC) or \GHOSTSCRIPT.
+%D A few helpers:
+
+{\catcode`\.=12
+ \catcode`\p=12
+ \catcode`\t=12
+ \gdef\WITHOUTPT#1pt{#1}}
+
+\def\withoutpt#1%
+ {\expandafter\WITHOUTPT#1}
+
+\def\negatecolorcomponent#1% #1 = \macro
+ {\scratchdimen1pt\advance\scratchdimen-#1\onepoint
+ \ifdim\scratchdimen<\zeropoint\scratchdimen\zeropoint\fi
+ \edef#1{\withoutpt\the\scratchdimen}}
+
+\let\negatedcolorcomponent\firstofoneargument
+
+\def\negatedcolorcomponent#1%
+ {\ifdim\dimexpr1pt-#1pt\relax<\zeropoint
+ 0pt%
+ \else
+ \expandafter\withoutpt\the\dimexpr1pt-#1pt\relax
+ \fi}
+
+\def\negatecolorcomponent#1% #1 = \macro
+ {\edef#1{\negatedcolorcomponent{#1}}}
+
+
\dump
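The helpers appended to mptopdf.tex compute 1 - c for a color component by abusing dimen arithmetic: the component is read as a length in points, subtracted from 1pt with e-TeX's \dimexpr, clamped at zero, and the trailing pt is stripped again by the catcode-12 \WITHOUTPT matcher. Below is a standalone sketch of that trick in plain pdfTeX; the macro bodies are copied from the diff above, with \zeropoint written as 0pt so that no ConTeXt support file is needed, and the \message tests are illustrative only.

% the "pt" delimiter must consist of catcode-12 characters, because that is
% what \the produces; hence the catcode group around \WITHOUTPT
{\catcode`\.=12
 \catcode`\p=12
 \catcode`\t=12
 \gdef\WITHOUTPT#1pt{#1}}

\def\withoutpt#1%
  {\expandafter\WITHOUTPT#1}

\def\negatedcolorcomponent#1%
  {\ifdim\dimexpr1pt-#1pt\relax<0pt % \zeropoint in the original
     0pt%
   \else
     \expandafter\withoutpt\the\dimexpr1pt-#1pt\relax
   \fi}

\message{0.4 -> \negatedcolorcomponent{0.4}}% reports 0.6
\message{1.2 -> \negatedcolorcomponent{1.2}}% clamped, reports 0pt
\bye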