author     Marius <mariausol@gmail.com>  2010-07-04 15:35:28 +0300
committer  Marius <mariausol@gmail.com>  2010-07-04 15:35:28 +0300
commit     b0f61c557fa27bddb54ad085c9dc9beefc851a30 (patch)
tree       a69dff7e9ee8d0022554603e8715fd482d4ac01c
parent     85b7bc695629926641c7cb752fd478adfdf374f3 (diff)
download   context-b0f61c557fa27bddb54ad085c9dc9beefc851a30.tar.gz
beta 2010-06-23 12:49
-rw-r--r--  fonts/data/tests/texmfhome.otf | bin 0 -> 1884 bytes
-rw-r--r--  scripts/context/lua/mtx-babel.lua | 2
-rw-r--r--  scripts/context/lua/mtx-base.lua | 136
-rw-r--r--  scripts/context/lua/mtx-cache.lua | 117
-rw-r--r--  scripts/context/lua/mtx-check.lua | 2
-rw-r--r--  scripts/context/lua/mtx-context.lua | 127
-rw-r--r--  scripts/context/lua/mtx-convert.lua | 2
-rw-r--r--  scripts/context/lua/mtx-fonts.lua | 52
-rw-r--r--  scripts/context/lua/mtx-grep.lua | 2
-rw-r--r--  scripts/context/lua/mtx-interface.lua | 2
-rw-r--r--  scripts/context/lua/mtx-metatex.lua | 15
-rw-r--r--  scripts/context/lua/mtx-modules.lua | 2
-rw-r--r--  scripts/context/lua/mtx-mptopdf.lua | 2
-rw-r--r--  scripts/context/lua/mtx-package.lua | 2
-rw-r--r--  scripts/context/lua/mtx-patterns.lua | 146
-rw-r--r--  scripts/context/lua/mtx-profile.lua | 2
-rw-r--r--  scripts/context/lua/mtx-scite.lua | 2
-rw-r--r--  scripts/context/lua/mtx-texworks.lua | 2
-rw-r--r--  scripts/context/lua/mtx-tools.lua | 2
-rw-r--r--  scripts/context/lua/mtx-update.lua | 35
-rw-r--r--  scripts/context/lua/mtx-watch.lua | 2
-rw-r--r--  scripts/context/lua/mtxrun.lua | 7366
-rw-r--r--  scripts/context/perl/mptopdf.pl | 34
-rw-r--r--  scripts/context/ruby/base/kpse.rb | 18
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.dll | bin 9216 -> 9216 bytes
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua | 7366
-rw-r--r--  scripts/context/stubs/mswin/setuptex.bat | 34
-rw-r--r--  scripts/context/stubs/source/mtxrun_dll.c | 125
-rw-r--r--  scripts/context/stubs/unix/luatools | 8187
-rw-r--r--  scripts/context/stubs/unix/mtxrun | 7366
-rw-r--r--  tex/context/base/attr-div.lua | 649
-rw-r--r--  tex/context/base/attr-div.mkiv | 136
-rw-r--r--  tex/context/base/attr-ini.lua | 665
-rw-r--r--  tex/context/base/attr-ini.mkiv | 151
-rw-r--r--  tex/context/base/back-ini.lua | 14
-rw-r--r--  tex/context/base/back-pdf.lua | 4
-rw-r--r--  tex/context/base/bibl-bib.lua | 6
-rw-r--r--  tex/context/base/bibl-tra.lua | 19
-rw-r--r--  tex/context/base/blob-ini.lua | 4
-rw-r--r--  tex/context/base/buff-ini.lua | 27
-rw-r--r--  tex/context/base/buff-ini.mkiv | 23
-rw-r--r--  tex/context/base/buff-ver.mkiv | 3
-rw-r--r--  tex/context/base/catc-act.tex | 4
-rw-r--r--  tex/context/base/catc-ini.mkiv | 6
-rw-r--r--  tex/context/base/char-def.lua | 30
-rw-r--r--  tex/context/base/chem-ini.lua | 4
-rw-r--r--  tex/context/base/chem-str.lua | 6
-rw-r--r--  tex/context/base/cont-new.mkiv | 4
-rw-r--r--  tex/context/base/cont-new.tex | 4
-rw-r--r--  tex/context/base/context.lus | 71
-rw-r--r--  tex/context/base/context.mkiv | 34
-rw-r--r--  tex/context/base/context.tex | 2
-rw-r--r--  tex/context/base/core-con.lua | 2
-rw-r--r--  tex/context/base/core-env.mkiv | 174
-rw-r--r--  tex/context/base/core-uti.lua | 8
-rw-r--r--  tex/context/base/core-uti.mkiv | 30
-rw-r--r--  tex/context/base/data-aux.lua | 19
-rw-r--r--  tex/context/base/data-con.lua | 84
-rw-r--r--  tex/context/base/data-crl.lua | 25
-rw-r--r--  tex/context/base/data-ctx.lua | 34
-rw-r--r--  tex/context/base/data-env.lua | 161
-rw-r--r--  tex/context/base/data-exp.lua | 336
-rw-r--r--  tex/context/base/data-ini.lua | 222
-rw-r--r--  tex/context/base/data-lst.lua | 23
-rw-r--r--  tex/context/base/data-lua.lua | 28
-rw-r--r--  tex/context/base/data-met.lua | 45
-rw-r--r--  tex/context/base/data-pre.lua | 8
-rw-r--r--  tex/context/base/data-res.lua | 1703
-rw-r--r--  tex/context/base/data-sch.lua | 26
-rw-r--r--  tex/context/base/data-tex.lua | 23
-rw-r--r--  tex/context/base/data-tmf.lua | 100
-rw-r--r--  tex/context/base/data-tmp.lua | 346
-rw-r--r--  tex/context/base/data-tre.lua | 48
-rw-r--r--  tex/context/base/data-use.lua | 48
-rw-r--r--  tex/context/base/data-zip.lua | 48
-rw-r--r--  tex/context/base/font-afm.lua | 147
-rw-r--r--  tex/context/base/font-chk.lua | 4
-rw-r--r--  tex/context/base/font-cid.lua | 8
-rw-r--r--  tex/context/base/font-clr.lua | 30
-rw-r--r--  tex/context/base/font-col.lua | 32
-rw-r--r--  tex/context/base/font-ctx.lua | 40
-rw-r--r--  tex/context/base/font-def.lua | 54
-rw-r--r--  tex/context/base/font-enc.lua | 27
-rw-r--r--  tex/context/base/font-enh.lua | 10
-rw-r--r--  tex/context/base/font-ext.lua | 24
-rw-r--r--  tex/context/base/font-fbk.lua | 2
-rw-r--r--  tex/context/base/font-gds.lua | 17
-rw-r--r--  tex/context/base/font-gds.mkiv | 2
-rw-r--r--  tex/context/base/font-ini.lua | 7
-rw-r--r--  tex/context/base/font-ini.mkii | 2
-rw-r--r--  tex/context/base/font-ini.mkiv | 89
-rw-r--r--  tex/context/base/font-log.lua | 6
-rw-r--r--  tex/context/base/font-map.lua | 10
-rw-r--r--  tex/context/base/font-mis.lua | 4
-rw-r--r--  tex/context/base/font-ota.lua | 1
-rw-r--r--  tex/context/base/font-otb.lua | 38
-rw-r--r--  tex/context/base/font-otc.lua | 4
-rw-r--r--  tex/context/base/font-otd.lua | 13
-rw-r--r--  tex/context/base/font-otf.lua | 375
-rw-r--r--  tex/context/base/font-otn.lua | 211
-rw-r--r--  tex/context/base/font-otp.lua | 8
-rw-r--r--  tex/context/base/font-ott.lua | 1
-rw-r--r--  tex/context/base/font-pat.lua | 12
-rw-r--r--  tex/context/base/font-syn.lua | 187
-rw-r--r--  tex/context/base/font-tfm.lua | 31
-rw-r--r--  tex/context/base/grph-inc.lua | 33
-rw-r--r--  tex/context/base/grph-u3d.lua | 6
-rw-r--r--  tex/context/base/l-aux.lua | 6
-rw-r--r--  tex/context/base/l-boolean.lua | 4
-rw-r--r--  tex/context/base/l-dir.lua | 2
-rw-r--r--  tex/context/base/l-file.lua | 192
-rw-r--r--  tex/context/base/l-lpeg.lua | 63
-rw-r--r--  tex/context/base/l-os.lua | 109
-rw-r--r--  tex/context/base/l-pdfview.lua | 8
-rw-r--r--  tex/context/base/l-table.lua | 13
-rw-r--r--  tex/context/base/l-url.lua | 116
-rw-r--r--  tex/context/base/l-utils.lua | 154
-rw-r--r--  tex/context/base/lang-ini.lua | 87
-rw-r--r--  tex/context/base/lang-ini.mkiv | 3
-rw-r--r--  tex/context/base/lang-wrd.lua | 6
-rw-r--r--  tex/context/base/lpdf-ano.lua | 28
-rw-r--r--  tex/context/base/lpdf-fld.lua | 60
-rw-r--r--  tex/context/base/lpdf-ini.lua | 56
-rw-r--r--  tex/context/base/lpdf-pdx.mkiv | 2
-rw-r--r--  tex/context/base/luat-bas.mkiv | 23
-rw-r--r--  tex/context/base/luat-cbk.lua | 97
-rw-r--r--  tex/context/base/luat-cnf.lua | 85
-rw-r--r--  tex/context/base/luat-cod.lua | 141
-rw-r--r--  tex/context/base/luat-cod.mkiv | 123
-rw-r--r--  tex/context/base/luat-dum.lua | 98
-rw-r--r--  tex/context/base/luat-env.lua | 76
-rw-r--r--  tex/context/base/luat-exe.lua | 41
-rw-r--r--  tex/context/base/luat-fio.lua | 83
-rw-r--r--  tex/context/base/luat-fmt.lua | 117
-rw-r--r--  tex/context/base/luat-ini.lua | 98
-rw-r--r--  tex/context/base/luat-ini.mkiv | 15
-rw-r--r--  tex/context/base/luat-iop.lua | 26
-rw-r--r--  tex/context/base/luat-lib.mkiv | 97
-rw-r--r--  tex/context/base/luat-run.lua | 42
-rw-r--r--  tex/context/base/luat-sto.lua | 36
-rw-r--r--  tex/context/base/lxml-aux.lua | 4
-rw-r--r--  tex/context/base/lxml-ent.lua | 4
-rw-r--r--  tex/context/base/lxml-lpt.lua | 32
-rw-r--r--  tex/context/base/lxml-tab.lua | 45
-rw-r--r--  tex/context/base/lxml-tex.lua | 61
-rw-r--r--  tex/context/base/m-barcodes.mkiv | 10
-rw-r--r--  tex/context/base/m-mathcrap.mkiv | 76
-rw-r--r--  tex/context/base/m-pstricks.lua | 13
-rw-r--r--  tex/context/base/m-pstricks.mkiv | 6
-rw-r--r--  tex/context/base/m-punk.mkiv | 4
-rw-r--r--  tex/context/base/m-timing.tex | 26
-rw-r--r--  tex/context/base/math-def.mkiv | 2
-rw-r--r--  tex/context/base/math-dim.lua | 1
-rw-r--r--  tex/context/base/math-ent.lua | 2
-rw-r--r--  tex/context/base/math-ext.lua | 23
-rw-r--r--  tex/context/base/math-ini.lua | 8
-rw-r--r--  tex/context/base/math-ini.mkiv | 4
-rw-r--r--  tex/context/base/math-int.mkiv | 41
-rw-r--r--  tex/context/base/math-map.lua | 12
-rw-r--r--  tex/context/base/math-noa.lua | 72
-rw-r--r--  tex/context/base/math-vfu.lua | 200
-rw-r--r--  tex/context/base/meta-ini.mkiv | 5
-rw-r--r--  tex/context/base/meta-pdf.lua | 14
-rw-r--r--  tex/context/base/metatex.lus | 9
-rw-r--r--  tex/context/base/metatex.tex | 8
-rw-r--r--  tex/context/base/mlib-ctx.lua | 4
-rw-r--r--  tex/context/base/mlib-pdf.lua | 4
-rw-r--r--  tex/context/base/mlib-pps.lua | 19
-rw-r--r--  tex/context/base/mlib-pps.mkiv | 8
-rw-r--r--  tex/context/base/mlib-run.lua | 114
-rw-r--r--  tex/context/base/mtx-context-arrange.tex | 2
-rw-r--r--  tex/context/base/mult-cld.lua | 8
-rw-r--r--  tex/context/base/mult-ini.mkiv | 138
-rw-r--r--  tex/context/base/node-dum.lua | 110
-rw-r--r--  tex/context/base/node-fin.lua | 2
-rw-r--r--  tex/context/base/node-fnt.lua | 70
-rw-r--r--  tex/context/base/node-ini.lua | 63
-rw-r--r--  tex/context/base/node-ini.mkiv | 35
-rw-r--r--  tex/context/base/node-inj.lua | 134
-rw-r--r--  tex/context/base/node-mig.lua | 6
-rw-r--r--  tex/context/base/node-par.lua | 4
-rw-r--r--  tex/context/base/node-pro.lua | 11
-rw-r--r--  tex/context/base/node-ref.lua | 16
-rw-r--r--  tex/context/base/node-res.lua | 41
-rw-r--r--  tex/context/base/node-rul.lua | 29
-rw-r--r--  tex/context/base/node-spl.lua | 589
-rw-r--r--  tex/context/base/node-spl.mkiv | 114
-rw-r--r--  tex/context/base/node-tra.lua | 6
-rw-r--r--  tex/context/base/node-tsk.lua | 18
-rw-r--r--  tex/context/base/page-flt.lua | 46
-rw-r--r--  tex/context/base/page-flt.mkiv | 16
-rw-r--r--  tex/context/base/page-lin.lua | 10
-rw-r--r--  tex/context/base/page-mul.mkiv | 1
-rw-r--r--  tex/context/base/page-str.lua | 22
-rw-r--r--  tex/context/base/page-str.mkii | 45
-rw-r--r--  tex/context/base/s-abr-01.tex | 55
-rw-r--r--  tex/context/base/s-fnt-23.tex | 9
-rw-r--r--  tex/context/base/s-fnt-25.tex | 19
-rw-r--r--  tex/context/base/s-mod-02.tex | 17
-rw-r--r--  tex/context/base/s-pre-50.tex | 2
-rw-r--r--  tex/context/base/scrp-cjk.lua | 8
-rw-r--r--  tex/context/base/scrp-ini.lua | 6
-rw-r--r--  tex/context/base/sort-ini.lua | 8
-rw-r--r--  tex/context/base/spac-ver.lua | 57
-rw-r--r--  tex/context/base/strc-def.mkiv | 2
-rw-r--r--  tex/context/base/strc-doc.lua | 21
-rw-r--r--  tex/context/base/strc-doc.mkiv | 2
-rw-r--r--  tex/context/base/strc-flt.mkii | 46
-rw-r--r--  tex/context/base/strc-flt.mkiv | 47
-rw-r--r--  tex/context/base/strc-ini.lua | 4
-rw-r--r--  tex/context/base/strc-itm.mkiv | 2
-rw-r--r--  tex/context/base/strc-lst.lua | 14
-rw-r--r--  tex/context/base/strc-lst.mkiv | 33
-rw-r--r--  tex/context/base/strc-mat.mkiv | 28
-rw-r--r--  tex/context/base/strc-not.lua | 49
-rw-r--r--  tex/context/base/strc-not.mkiv | 2
-rw-r--r--  tex/context/base/strc-num.lua | 12
-rw-r--r--  tex/context/base/strc-pag.lua | 12
-rw-r--r--  tex/context/base/strc-pag.mkiv | 1
-rw-r--r--  tex/context/base/strc-ref.lua | 14
-rw-r--r--  tex/context/base/strc-ref.mkiv | 32
-rw-r--r--  tex/context/base/strc-reg.lua | 8
-rw-r--r--  tex/context/base/strc-reg.mkiv | 12
-rw-r--r--  tex/context/base/strc-sec.mkiv | 6
-rw-r--r--  tex/context/base/supp-fil.lua | 22
-rw-r--r--  tex/context/base/supp-num.tex | 21
-rw-r--r--  tex/context/base/supp-ran.lua | 4
-rw-r--r--  tex/context/base/syst-lua.lua | 23
-rw-r--r--  tex/context/base/syst-mes.mkiv | 38
-rw-r--r--  tex/context/base/task-ini.lua | 53
-rw-r--r--  tex/context/base/trac-deb.lua | 247
-rw-r--r--  tex/context/base/trac-deb.mkiv | 24
-rw-r--r--  tex/context/base/trac-inf.lua | 164
-rw-r--r--  tex/context/base/trac-lmx.lua | 8
-rw-r--r--  tex/context/base/trac-log.lua | 307
-rw-r--r--  tex/context/base/trac-pro.lua | 207
-rw-r--r--  tex/context/base/trac-set.lua | 254
-rw-r--r--  tex/context/base/trac-tex.lua | 47
-rw-r--r--  tex/context/base/trac-tex.mkiv | 21
-rw-r--r--  tex/context/base/trac-tim.lua | 16
-rw-r--r--  tex/context/base/trac-tra.lua | 216
-rw-r--r--  tex/context/base/type-dejavu.mkiv | 47
-rw-r--r--  tex/context/base/type-one.mkii | 40
-rw-r--r--  tex/context/base/type-otf.mkiv | 80
-rw-r--r--  tex/context/base/typo-cap.lua | 109
-rw-r--r--  tex/context/base/typo-cap.mkiv | 6
-rw-r--r--  tex/context/base/typo-dig.lua | 94
-rw-r--r--  tex/context/base/typo-dig.mkiv | 12
-rw-r--r--  tex/context/base/typo-mir.lua | 12
-rw-r--r--  tex/context/base/typo-rep.lua | 8
-rw-r--r--  tex/context/base/typo-rep.mkiv | 4
-rw-r--r--  tex/context/base/typo-spa.lua | 14
-rw-r--r--  tex/context/base/x-asciimath.lua | 10
-rw-r--r--  tex/context/base/x-asciimath.mkiv | 2
-rw-r--r--  tex/context/base/x-calcmath.lua | 7
-rw-r--r--  tex/context/base/x-calcmath.mkiv | 18
-rw-r--r--  tex/context/base/x-cals.mkiv | 2
-rw-r--r--  tex/context/base/x-ct.mkiv | 2
-rw-r--r--  tex/context/base/x-mathml.mkiv | 2
-rw-r--r--  tex/context/base/x-set-11.mkiv | 15
-rw-r--r--  tex/context/fonts/informal-math.lfg | 12
-rw-r--r--  tex/context/fonts/lm-math.lfg | 9
-rw-r--r--  tex/context/patterns/lang-it.pat | 2
-rw-r--r--  tex/context/sample/khatt-ar.tex | 4
-rw-r--r--  tex/context/sample/khatt-en.tex | 4
-rw-r--r--  tex/generic/context/luatex-fonts-merged.lua | 2163
-rw-r--r--  tex/generic/context/luatex-fonts.lua | 7
-rw-r--r--  web2c/context.cnf | 84
-rw-r--r--  web2c/contextcnf.lua | 169
269 files changed, 22451 insertions, 25153 deletions
diff --git a/fonts/data/tests/texmfhome.otf b/fonts/data/tests/texmfhome.otf
new file mode 100644
index 000000000..d0af1152f
--- /dev/null
+++ b/fonts/data/tests/texmfhome.otf
Binary files differ
diff --git a/scripts/context/lua/mtx-babel.lua b/scripts/context/lua/mtx-babel.lua
index 01e2ba4b2..7e08633cf 100644
--- a/scripts/context/lua/mtx-babel.lua
+++ b/scripts/context/lua/mtx-babel.lua
@@ -415,7 +415,7 @@ do
end
-logs.extendbanner("Babel Input To UTF Conversion 1.20",true)
+logs.extendbanner("Babel Input To UTF Conversion 1.20")
messages.help = [[
--language=string conversion language (e.g. greek)
diff --git a/scripts/context/lua/mtx-base.lua b/scripts/context/lua/mtx-base.lua
new file mode 100644
index 000000000..76284ac99
--- /dev/null
+++ b/scripts/context/lua/mtx-base.lua
@@ -0,0 +1,136 @@
+if not modules then modules = { } end modules ['mtx-base'] = {
+ version = 1.001,
+ comment = "formerly known as luatools",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+logs.extendbanner("ConTeXt TDS Management Tool 1.35 (aka luatools)")
+
+-- private option --noluc for testing errors in the stub
+
+local instance = resolvers.instance
+
+instance.engine = environment.arguments["engine"] or instance.engine or 'luatex'
+instance.progname = environment.arguments["progname"] or instance.progname or 'context'
+instance.luaname = environment.arguments["luafile"] or ""
+instance.lualibs = environment.arguments["lualibs"] or nil
+instance.allresults = environment.arguments["all"] or false
+instance.pattern = environment.arguments["pattern"] or nil
+instance.sortdata = environment.arguments["sort"] or false
+instance.my_format = environment.arguments["format"] or instance.format
+
+if type(instance.pattern) == 'boolean' then
+ logs.simple("invalid pattern specification")
+ instance.pattern = nil
+end
+
+if environment.arguments["trace"] then
+ resolvers.settrace(environment.arguments["trace"]) -- move to mtxrun ?
+end
+
+runners = runners or { }
+messages = messages or { }
+
+messages.no_ini_file = [[
+There is no lua initialization file found. This file can be forced by the
+"--progname" directive, or specified with "--luaname", or it is derived
+automatically from the formatname (aka jobname). It may be that you have
+to regenerate the file database using "mtxrun --generate".
+]]
+
+messages.help = [[
+--generate generate file database
+--variables show configuration variables
+--expansions show expanded variables
+--configurations show configuration order
+--expand-braces expand complex variable
+--expand-path expand variable (resolve paths)
+--expand-var expand variable (resolve references)
+--show-path show path expansion of ...
+--var-value report value of variable
+--find-file report file location
+--find-path report path of file
+--make or --ini make luatex format
+--run or --fmt= run luatex format
+--luafile=str lua inifile (default is <progname>.lua)
+--lualibs=list libraries to assemble (optional when --compile)
+--compile assemble and compile lua inifile
+--verbose give a bit more info
+--all show all found files
+--sort sort cached data
+--format=str filter cf format specification (default 'tex', use 'any' for any match)
+--engine=str target engine
+--progname=str format or backend
+--pattern=str filter variables
+--trackers=list enable given trackers
+]]
+
+if environment.arguments["find-file"] then
+ resolvers.load()
+ instance.format = environment.arguments["format"] or instance.format
+ if instance.pattern then
+ instance.allresults = true
+ resolvers.for_files(resolvers.find_files, { instance.pattern }, instance.my_format)
+ else
+ resolvers.for_files(resolvers.find_files, environment.files, instance.my_format)
+ end
+elseif environment.arguments["find-path"] then
+ resolvers.load()
+ local path = resolvers.find_path(environment.files[1], instance.my_format)
+ print(path) -- quite basic, wil become function in logs
+elseif environment.arguments["run"] then
+ resolvers.load("nofiles") -- ! no need for loading databases
+ trackers.enable("resolvers.locating")
+ environment.run_format(environment.files[1] or "",environment.files[2] or "",environment.files[3] or "")
+elseif environment.arguments["fmt"] then
+ resolvers.load("nofiles") -- ! no need for loading databases
+ trackers.enable("resolvers.locating")
+ environment.run_format(environment.arguments["fmt"], environment.files[1] or "",environment.files[2] or "")
+elseif environment.arguments["expand-braces"] then
+ resolvers.load("nofiles")
+ resolvers.for_files(resolvers.expand_braces, environment.files)
+elseif environment.arguments["expand-path"] then
+ resolvers.load("nofiles")
+ resolvers.for_files(resolvers.expand_path, environment.files)
+elseif environment.arguments["expand-var"] or environment.arguments["expand-variable"] then
+ resolvers.load("nofiles")
+ resolvers.for_files(resolvers.expand_var, environment.files)
+elseif environment.arguments["show-path"] or environment.arguments["path-value"] then
+ resolvers.load("nofiles")
+ resolvers.for_files(resolvers.show_path, environment.files)
+elseif environment.arguments["var-value"] or environment.arguments["show-value"] then
+ resolvers.load("nofiles")
+ resolvers.for_files(resolvers.var_value, environment.files)
+elseif environment.arguments["format-path"] then
+ resolvers.load()
+ logs.simple(caches.getwritablepath("format"))
+elseif instance.pattern then -- brrr
+ resolvers.load()
+ instance.format = environment.arguments["format"] or instance.format
+ instance.allresults = true
+ resolvers.for_files(resolvers.find_files, { instance.pattern }, instance.my_format)
+elseif environment.arguments["generate"] then
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+elseif environment.arguments["make"] or environment.arguments["ini"] or environment.arguments["compile"] then
+ resolvers.load()
+ trackers.enable("resolvers.locating")
+ environment.make_format(environment.files[1] or "")
+elseif environment.arguments["variables"] or environment.arguments["show-variables"] then
+ resolvers.load("nofiles")
+ resolvers.listers.variables(false,instance.pattern)
+elseif environment.arguments["expansions"] or environment.arguments["show-expansions"] then
+ resolvers.load("nofiles")
+ resolvers.listers.expansions(false,instance.pattern)
+elseif environment.arguments["configurations"] or environment.arguments["show-configurations"] then
+ resolvers.load("nofiles")
+ resolvers.listers.configurations(false,instance.pattern)
+elseif environment.arguments["help"] or (environment.files[1]=='help') or (#environment.files==0) then
+ logs.help(messages.help)
+else
+ resolvers.load()
+ resolvers.for_files(resolvers.find_files, environment.files, instance.my_format)
+end
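
A note on the option handling above: a flag given as --key=value reaches environment.arguments as a string, while a bare flag arrives as the boolean true, which is what the type check on instance.pattern guards against. A tiny standalone sketch of that guard; the arguments table below is an invented stand-in, not mtxrun's real parser:

    -- Sketch only: "arguments" imitates environment.arguments after the user
    -- typed a bare --pattern instead of --pattern=<string>.
    local arguments = { pattern = true }

    local pattern = arguments["pattern"] or nil
    if type(pattern) == "boolean" then
        print("invalid pattern specification")  -- mtx-base reports this via logs.simple
        pattern = nil
    end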
diff --git a/scripts/context/lua/mtx-cache.lua b/scripts/context/lua/mtx-cache.lua
index c2a0db00d..a6985d3bc 100644
--- a/scripts/context/lua/mtx-cache.lua
+++ b/scripts/context/lua/mtx-cache.lua
@@ -9,70 +9,87 @@ if not modules then modules = { } end modules ['mtx-cache'] = {
scripts = scripts or { }
scripts.cache = scripts.cache or { }
-function scripts.cache.collect_one(...)
- local path = caches.setpath(...)
- local tmas = dir.glob(path .. "/*.tma")
- local tmcs = dir.glob(path .. "/*.tmc")
- return path, tmas, tmcs
-end
-
-function scripts.cache.collect_two(...)
- local path = caches.setpath(...)
- local rest = dir.glob(path .. "/**/*")
- return path, rest
-end
-
-local suffixes = { "afm", "tfm", "def", "enc", "otf", "mp", "data" }
-
-function scripts.cache.process_one(action)
- for i=1,#suffixes do
- action("fonts", suffixes[i])
+local function collect(path)
+ local all = dir.glob(path .. "/**/*")
+ local tmas, tmcs, rest = { }, { }, { }
+ for i=1,#all do
+ local name = all[i]
+ local suffix = file.suffix(name)
+ if suffix == "tma" then
+ tmas[#tmas+1] = name
+ elseif suffix == "tmc" then
+ tmcs[#tmcs+1] = name
+ else
+ rest[#rest+1] = name
+ end
end
+ return tmas, tmcs, rest, all
end
-function scripts.cache.process_two(action)
- action("curl")
+local function list(banner,path,tmas,tmcs,rest)
+ logs.report("cache",string.format("%s: %s",banner,path))
+ logs.report()
+ logs.report("cache",string.format("tma : %4i",#tmas))
+ logs.report("cache",string.format("tmc : %4i",#tmcs))
+ logs.report("cache",string.format("rest : %4i",#rest))
+ logs.report("cache",string.format("total : %4i",#tmas+#tmcs+#rest))
+ logs.report()
end
--- todo: recursive delete of paths
-
-function scripts.cache.remove(list,keep)
- local n, keepsuffixes = 0, table.tohash(keep or { })
+local function purge(banner,path,list,all)
+ logs.report("cache",string.format("%s: %s",banner,path))
+ logs.report()
+ local n = 0
for i=1,#list do
local filename = list[i]
if string.find(filename,"luatex%-cache") then -- safeguard
- if not keepsuffixes[file.extname(filename) or ""] then
+ if all then
os.remove(filename)
n = n + 1
+ else
+ local suffix = file.suffix(filename)
+ if suffix == "tma" then
+ local checkname = file.replacesuffix(filename,"tma","tmc")
+ if lfs.isfile(checkname) then
+ os.remove(filename)
+ n = n + 1
+ end
+ end
end
end
end
- return n
+ logs.report("cache",string.format("removed tma files : %i",n))
+ logs.report()
+end
+
+function scripts.cache.purge()
+ local writable = caches.getwritablepath()
+ local tmas, tmcs, rest = collect(writable)
+ list("writable path",writable,tmas,tmcs,rest)
+ local n = purge("writable path",writable,tmas)
+ list("writable path",writable,tmas,tmcs,rest)
end
-function scripts.cache.delete(all,keep)
- scripts.cache.process_one(function(...)
- local path, rest = scripts.cache.collect_one(...)
- local n = scripts.cache.remove(rest,keep)
- logs.report("cache path",string.format("%4i files out of %4i deleted on %s",n,#rest,path))
- end)
- scripts.cache.process_two(function(...)
- local path, rest = scripts.cache.collect_two(...)
- local n = scripts.cache.remove(rest,keep)
- logs.report("cache path",string.format("%4i files out of %4i deleted on %s",n,#rest,path))
- end)
+function scripts.cache.erase()
+ local writable = caches.getwritablepath()
+ local tmas, tmcs, rest, all = collect(writable)
+ list("writable path",writable,tmas,tmcs,rest)
+ local n = purge("writable path",writable,all,true)
+ list("writable path",writable,tmas,tmcs,rest)
end
-function scripts.cache.list(all)
- scripts.cache.process_one(function(...)
- local path, tmas, tmcs = scripts.cache.collect_one(...)
- logs.report("cache path",string.format("%4i (tma:%4i, tmc:%4i) %s",#tmas+#tmcs,#tmas,#tmcs,path))
- logs.report("cache path",string.format("%4i (tma:%4i, tmc:%4i) %s",#tmas+#tmcs,#tmas,#tmcs,path))
- end)
- scripts.cache.process_two(function(...)
- local path, rest = scripts.cache.collect_two("curl")
- logs.report("cache path",string.format("%4i %s",#rest,path))
- end)
+function scripts.cache.list()
+ local readables = caches.getreadablepaths()
+ local writable = caches.getwritablepath()
+ local tmas, tmcs, rest = collect(writable)
+ list("writable path",writable,tmas,tmcs,rest)
+ for i=1,#readables do
+ local readable = readables[i]
+ if readable ~= writable then
+ local tmas, tmcs = collect(readable)
+ list("readable path",readable,tmas,tmcs,rest)
+ end
+ end
end
logs.extendbanner("ConTeXt & MetaTeX Cache Management 0.10")
@@ -86,11 +103,11 @@ messages.help = [[
]]
if environment.argument("purge") then
- scripts.cache.delete(environment.argument("all"),{"tmc"})
+ scripts.cache.purge()
elseif environment.argument("erase") then
- scripts.cache.delete(environment.argument("all"))
+ scripts.cache.erase()
elseif environment.argument("list") then
- scripts.cache.list(environment.argument("all"))
+ scripts.cache.list()
else
logs.help(messages.help)
end
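
In the rewritten mtx-cache, collect sorts everything below the cache root into tma, tmc and rest by suffix, and purge only deletes a *.tma (serialized) file when its compiled *.tmc sibling exists, so no cache entry is lost. A standalone sketch of that rule; the two local helpers stand in for ConTeXt's file.suffix and file.replacesuffix, and a plain table stands in for lfs.isfile and os.remove:

    -- Minimal sketch of the purge rule in scripts.cache.purge.
    local function suffix(name)
        return name:match("%.([%a%d]+)$") or ""
    end
    local function replacesuffix(name,old,new)
        return (name:gsub("%." .. old .. "$", "." .. new))
    end

    local present = {                        -- pretend these files exist on disk
        ["fonts/demo.tma"]   = true,
        ["fonts/demo.tmc"]   = true,
        ["fonts/orphan.tma"] = true,         -- no compiled sibling, so it is kept
    }

    local removed = 0
    for name in pairs(present) do
        if suffix(name) == "tma" then
            local checkname = replacesuffix(name,"tma","tmc")
            if present[checkname] then       -- lfs.isfile(checkname) in the real script
                present[name] = nil          -- os.remove(name) in the real script
                removed = removed + 1
            end
        end
    end
    print("removed tma files: " .. removed)  -- 1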
diff --git a/scripts/context/lua/mtx-check.lua b/scripts/context/lua/mtx-check.lua
index 4266ddf0d..0c9a1708d 100644
--- a/scripts/context/lua/mtx-check.lua
+++ b/scripts/context/lua/mtx-check.lua
@@ -127,7 +127,7 @@ function scripts.checker.check(filename)
end
end
-logs.extendbanner("Basic ConTeXt Syntax Checking 0.10",true)
+logs.extendbanner("Basic ConTeXt Syntax Checking 0.10")
messages.help = [[
--convert check tex file for errors
diff --git a/scripts/context/lua/mtx-context.lua b/scripts/context/lua/mtx-context.lua
index 79e74e407..36eb01c52 100644
--- a/scripts/context/lua/mtx-context.lua
+++ b/scripts/context/lua/mtx-context.lua
@@ -443,13 +443,6 @@ function scripts.context.multipass.makeoptionfile(jobname,ctxdata,kindofrun,curr
--
setalways("\\unprotect")
--
- setalways("%% special commands, mostly for the ctx development team")
- --
- if environment.argument("dumpdelta") then
- setalways("\\tracersdumpdelta")
- elseif environment.argument("dumphash") then
- setalways("\\tracersdumphash")
- end
setalways("%% feedback and basic job control")
if type(environment.argument("track")) == "string" then
setvalue ("track" , "\\enabletrackers[%s]")
@@ -591,9 +584,9 @@ scripts.context.interfaces = {
scripts.context.defaultformats = {
"cont-en",
"cont-nl",
- "mptopdf",
--- "metatex",
- "metafun",
+-- "mptopdf", -- todo: mak emkiv variant
+-- "metatex", -- will show up soon
+-- "metafun", -- todo: mp formats
-- "plain"
}
@@ -686,7 +679,7 @@ function scripts.context.run(ctxdata,filename)
local filename = files[i]
local basename, pathname = file.basename(filename), file.dirname(filename)
local jobname = file.removesuffix(basename)
- if pathname == "" then
+ if pathname == "" and not environment.argument("global") then
filename = "./" .. filename
end
-- look at the first line
@@ -747,14 +740,23 @@ function scripts.context.run(ctxdata,filename)
oldbase = file.removesuffix(jobname)
newbase = file.removesuffix(resultname)
if oldbase ~= newbase then
- for _, suffix in next, scripts.context.beforesuffixes do
- local oldname = file.addsuffix(oldbase,suffix)
- local newname = file.addsuffix(newbase,suffix)
- local tmpname = "keep-"..oldname
- os.remove(tmpname)
- os.rename(oldname,tmpname)
- os.remove(oldname)
- os.rename(newname,oldname)
+ if environment.argument("purgeresult") then
+ for _, suffix in next, scripts.context.aftersuffixes do
+ local oldname = file.addsuffix(oldbase,suffix)
+ local newname = file.addsuffix(newbase,suffix)
+ os.remove(newname)
+ os.remove(oldname)
+ end
+ else
+ for _, suffix in next, scripts.context.beforesuffixes do
+ local oldname = file.addsuffix(oldbase,suffix)
+ local newname = file.addsuffix(newbase,suffix)
+ local tmpname = "keep-"..oldname
+ os.remove(tmpname)
+ os.rename(oldname,tmpname)
+ os.remove(oldname)
+ os.rename(newname,oldname)
+ end
end
else
resultname = nil
@@ -851,13 +853,24 @@ function scripts.context.run(ctxdata,filename)
os.remove(jobname..".top")
--
if resultname then
- for _, suffix in next, scripts.context.aftersuffixes do
- local oldname = file.addsuffix(oldbase,suffix)
- local newname = file.addsuffix(newbase,suffix)
- local tmpname = "keep-"..oldname
- os.remove(newname)
- os.rename(oldname,newname)
- os.rename(tmpname,oldname)
+ if environment.argument("purgeresult") then
+ -- so, if there is no result then we don't get the old one, but
+ -- related files (log etc) are still there for tracing purposes
+ for _, suffix in next, scripts.context.aftersuffixes do
+ local oldname = file.addsuffix(oldbase,suffix)
+ local newname = file.addsuffix(newbase,suffix)
+ os.remove(newname) -- to be sure
+ os.rename(oldname,newname)
+ end
+ else
+ for _, suffix in next, scripts.context.aftersuffixes do
+ local oldname = file.addsuffix(oldbase,suffix)
+ local newname = file.addsuffix(newbase,suffix)
+ local tmpname = "keep-"..oldname
+ os.remove(newname)
+ os.rename(oldname,newname)
+ os.rename(tmpname,oldname)
+ end
end
logs.simple("result renamed to: %s",newbase)
end
@@ -953,32 +966,33 @@ function scripts.context.pipe()
end
end
+local make_mkiv_format = environment.make_format
+
+local function make_mkii_format(name,engine)
+ if environment.argument(engine) then
+ local command = string.format("mtxrun texexec.rb --make --%s %s",name,engine)
+ logs.simple("running command: %s",command)
+ os.spawn(command)
+ end
+end
+
function scripts.context.make(name)
- local runners = {
- "luatools --make --compile ",
- (environment.argument("pdftex") and "mtxrun texexec.rb --make --pdftex ") or false,
- (environment.argument("xetex") and "mtxrun texexec.rb --make --xetex " ) or false,
- }
local list = (name and { name }) or (environment.files[1] and environment.files) or scripts.context.defaultformats
for i=1,#list do
local name = list[i]
- name = scripts.context.interfaces[name] or name
- for i=1,#runners do
- local runner = runners[i]
- if runner then
- local command = runner .. name
- logs.simple("running command: %s",command)
- os.spawn(command)
- end
+ name = scripts.context.interfaces[name] or name or ""
+ if name ~= "" then
+ make_mkiv_format(name)
+ make_mkii_format(name,"pdftex")
+ make_mkii_format(name,"xetex")
end
end
end
function scripts.context.generate()
- -- hack, should also be a shared function
- local command = "luatools --generate "
- logs.simple("running command: %s",command)
- os.spawn(command)
+ resolvers.instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
end
function scripts.context.ctx()
@@ -1271,9 +1285,10 @@ function scripts.context.timed(action)
statistics.timed(action)
end
-local zipname = "cont-tmf.zip"
-local mainzip = "http://www.pragma-ade.com/context/latest/" .. zipname
-local validtrees = { "texmf-local", "texmf-context" }
+local zipname = "cont-tmf.zip"
+local mainzip = "http://www.pragma-ade.com/context/latest/" .. zipname
+local validtrees = { "texmf-local", "texmf-context" }
+local selfscripts = { "mtxrun.lua" } -- was: { "luatools.lua", "mtxrun.lua" }
function zip.loaddata(zipfile,filename) -- should be in zip lib
local f = zipfile:open(filename)
@@ -1380,7 +1395,7 @@ function scripts.context.update()
end
end
end
- for _, scriptname in next, { "luatools.lua", "mtxrun.lua" } do
+ for _, scriptname in next, selfscripts do
local oldscript = resolvers.find_file(scriptname) or ""
if oldscript ~= "" and is_okay(oldscript) then
local newscript = "./scripts/context/lua/" .. scriptname
@@ -1396,8 +1411,10 @@ function scripts.context.update()
end
end
if force then
- os.execute("context --generate")
- os.execute("context --make")
+ -- os.execute("context --generate")
+ -- os.execute("context --make")
+ scripts.context.generate()
+ scripts.context.make()
end
end
if force then
@@ -1407,7 +1424,7 @@ function scripts.context.update()
end
end
-logs.extendbanner("ConTeXt Process Management 0.51",true)
+logs.extendbanner("ConTeXt Process Management 0.51")
messages.help = [[
--run process (one or more) files (default action)
@@ -1428,6 +1445,7 @@ messages.help = [[
--result=name rename the resulting output to the given name
--trackers=list show/set tracker variables
--directives=list show/set directive variables
+--purgeresult purge result file before run
--forcexml force xml stub (optional flag: --mkii)
--forcecld force cld (context lua document) stub
@@ -1468,13 +1486,6 @@ expert options:
--extras show extras
]]
-messages.private = [[
-private options:
-
---dumphash dump hash table afterwards
---dumpdelta dump hash table afterwards (only new entries)
-]]
-
messages.special = [[
special options:
@@ -1517,7 +1528,7 @@ elseif environment.argument("touch") then
elseif environment.argument("update") then
scripts.context.update()
elseif environment.argument("expert") then
- logs.help(table.join({ messages.expert, messages.private, messages.special },"\n"))
+ logs.help(table.join({ messages.expert, messages.special },"\n"))
elseif environment.argument("extras") then
scripts.context.extras()
elseif environment.argument("extra") then
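
The --result handling above has two modes: with --purgeresult both the jobname files and the previous result files are removed outright, while the default path preserves the old result by shuffling names through a temporary keep- copy around the run. A toy, self-contained sketch of that default shuffle; the file names are invented and the real code loops over scripts.context.beforesuffixes / aftersuffixes:

    -- Toy illustration of the rename dance in scripts.context.run when
    -- --result=... is used without --purgeresult.
    local function touch(name,content)
        local f = assert(io.open(name,"w"))
        f:write(content)
        f:close()
    end

    local oldname = "myjob.pdf"              -- invented jobname output
    local newname = "myresult.pdf"           -- invented --result target
    local tmpname = "keep-" .. oldname

    touch(oldname,"output sitting under the jobname")
    touch(newname,"previously renamed result")

    -- before the run: stash the jobname file, put the old result back in place
    os.remove(tmpname)
    os.rename(oldname,tmpname)
    os.remove(oldname)
    os.rename(newname,oldname)

    -- ... here the TeX run would (re)create myjob.pdf ...

    -- after the run: publish the fresh file under the result name, restore the stash
    os.remove(newname)
    os.rename(oldname,newname)
    os.rename(tmpname,oldname)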
diff --git a/scripts/context/lua/mtx-convert.lua b/scripts/context/lua/mtx-convert.lua
index 62198a621..448a1b6ca 100644
--- a/scripts/context/lua/mtx-convert.lua
+++ b/scripts/context/lua/mtx-convert.lua
@@ -119,7 +119,7 @@ function scripts.convert.convertgiven()
end
-logs.extendbanner("ConTeXT Graphic Conversion Helpers 0.10",true)
+logs.extendbanner("ConTeXT Graphic Conversion Helpers 0.10")
messages.help = [[
--convertall convert all graphics on path
diff --git a/scripts/context/lua/mtx-fonts.lua b/scripts/context/lua/mtx-fonts.lua
index 74012ae38..483c834a5 100644
--- a/scripts/context/lua/mtx-fonts.lua
+++ b/scripts/context/lua/mtx-fonts.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['mtx-fonts'] = {
license = "see context related readme files"
}
+-- todo: fc-cache -v en check dirs, or better is: fc-cat -v | grep Directory
+
if not fontloader then fontloader = fontforge end
dofile(resolvers.find_file("font-otp.lua","tex"))
@@ -15,6 +17,46 @@ dofile(resolvers.find_file("font-mis.lua","tex"))
scripts = scripts or { }
scripts.fonts = scripts.fonts or { }
+function fonts.names.statistics()
+ fonts.names.load()
+
+ local data = fonts.names.data
+ local statistics = data.statistics
+
+ local function counted(t)
+ local n = { }
+ for k, v in next, t do
+ n[k] = table.count(v)
+ end
+ return table.sequenced(n)
+ end
+
+ logs.simple("cache uuid : %s", data.cache_uuid)
+ logs.simple("cache version : %s", data.cache_version)
+ logs.simple("number of trees : %s", #data.data_state)
+ logs.simpleline()
+ logs.simple("number of fonts : %s", statistics.fonts or 0)
+ logs.simple("used files : %s", statistics.readfiles or 0)
+ logs.simple("skipped files : %s", statistics.skippedfiles or 0)
+ logs.simple("duplicate files : %s", statistics.duplicatefiles or 0)
+ logs.simple("specifications : %s", #data.specifications)
+ logs.simple("families : %s", table.count(data.families))
+ logs.simpleline()
+ logs.simple("mappings : %s", counted(data.mappings))
+ logs.simple("fallbacks : %s", counted(data.fallbacks))
+ logs.simpleline()
+ logs.simple("used styles : %s", table.sequenced(statistics.used_styles))
+ logs.simple("used variants : %s", table.sequenced(statistics.used_variants))
+ logs.simple("used weights : %s", table.sequenced(statistics.used_weights))
+ logs.simple("used widths : %s", table.sequenced(statistics.used_widths))
+ logs.simpleline()
+ logs.simple("found styles : %s", table.sequenced(statistics.styles))
+ logs.simple("found variants : %s", table.sequenced(statistics.variants))
+ logs.simple("found weights : %s", table.sequenced(statistics.weights))
+ logs.simple("found widths : %s", table.sequenced(statistics.widths))
+
+end
+
function fonts.names.simple()
local simpleversion = 1.001
local simplelist = { "ttf", "otf", "ttc", "dfont" }
@@ -151,7 +193,6 @@ local function list_specifications(t,info)
subfont(entry.subfont),
fontweight(entry.fontweight),
}
- e[k] = entry
end
table.formatcolumns(s)
for k=1,#s do
@@ -243,6 +284,9 @@ function scripts.fonts.list()
elseif given then
--~ mtxrun --script font --list somename
list_matches(fonts.names.list(given,reload,all),info)
+ elseif all then
+ pattern = "*"
+ list_matches(fonts.names.list(string.topattern(pattern,true),reload,all),info)
else
logs.report("fontnames","not supported: --list <no specification>",name)
end
@@ -290,7 +334,7 @@ function scripts.fonts.save()
end
end
-logs.extendbanner("ConTeXt Font Database Management 0.21",true)
+logs.extendbanner("ConTeXt Font Database Management 0.21")
messages.help = [[
--save save open type font in raw table
@@ -307,6 +351,7 @@ messages.help = [[
--all show all found instances
--info give more details
--track=list enable trackers
+--statistics some info about the database
examples of searches:
@@ -320,6 +365,7 @@ mtxrun --script font --list --spec somename-bold-italic
mtxrun --script font --list --spec --pattern=*somename*
mtxrun --script font --list --spec --filter="fontname=somename"
mtxrun --script font --list --spec --filter="familyname=somename,weight=bold,style=italic,width=condensed"
+mtxrun --script font --list --spec --filter="familyname=crap*,weight=bold,style=italic"
mtxrun --script font --list --file somename
mtxrun --script font --list --file --pattern=*somename*
@@ -340,6 +386,8 @@ elseif environment.argument("reload") then
scripts.fonts.reload()
elseif environment.argument("save") then
scripts.fonts.save()
+elseif environment.argument("statistics") then
+ fonts.names.statistics()
else
logs.help(messages.help)
end
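
The new --statistics report for the font database leans on two of ConTeXt's l-table helpers: table.count (number of entries in a table) and table.sequenced (roughly a "key=value, key=value" rendering). A plain-Lua approximation of the local counted helper used above, with both helpers stubbed in and invented data:

    -- Sketch of "counted" from fonts.names.statistics; table_count and
    -- table_sequenced approximate ConTeXt's table.count / table.sequenced.
    local function table_count(t)
        local n = 0
        for _ in pairs(t) do n = n + 1 end
        return n
    end

    local function table_sequenced(t)
        local keys = { }
        for k in pairs(t) do keys[#keys+1] = k end
        table.sort(keys)
        local s = { }
        for i=1,#keys do s[#s+1] = keys[i] .. "=" .. tostring(t[keys[i]]) end
        return table.concat(s,", ")
    end

    local function counted(t)                -- as in mtx-fonts.lua
        local n = { }
        for k, v in pairs(t) do
            n[k] = table_count(v)
        end
        return table_sequenced(n)
    end

    local mappings = {                       -- invented stand-in for data.mappings
        otf = { dejavuserif = "...", lmroman10regular = "..." },
        ttf = { arial = "..." },
    }
    print(counted(mappings))                 -- otf=2, ttf=1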
diff --git a/scripts/context/lua/mtx-grep.lua b/scripts/context/lua/mtx-grep.lua
index 9604bc9f8..98378f92b 100644
--- a/scripts/context/lua/mtx-grep.lua
+++ b/scripts/context/lua/mtx-grep.lua
@@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['mtx-babel'] = {
scripts = scripts or { }
scripts.grep = scripts.grep or { }
-logs.extendbanner("Simple Grepper 0.10",true)
+logs.extendbanner("Simple Grepper 0.10")
local find, format = string.find, string.format
diff --git a/scripts/context/lua/mtx-interface.lua b/scripts/context/lua/mtx-interface.lua
index 730a030d9..34ecffab0 100644
--- a/scripts/context/lua/mtx-interface.lua
+++ b/scripts/context/lua/mtx-interface.lua
@@ -242,7 +242,7 @@ function scripts.interface.messages()
end
end
-logs.extendbanner("ConTeXt Interface Related Goodies 0.11",true)
+logs.extendbanner("ConTeXt Interface Related Goodies 0.11")
messages.help = [[
--scite generate scite interface
diff --git a/scripts/context/lua/mtx-metatex.lua b/scripts/context/lua/mtx-metatex.lua
index 4453e2ccb..fdda51ced 100644
--- a/scripts/context/lua/mtx-metatex.lua
+++ b/scripts/context/lua/mtx-metatex.lua
@@ -14,20 +14,9 @@ scripts.metatex = scripts.metatex or { }
-- metatex
function scripts.metatex.make()
- local command = "luatools --make --compile metatex"
- logs.simple("running command: %s",command)
- os.spawn(command)
+ environment.make_format("metatex")
end
---~ function scripts.metatex.run()
---~ local name = environment.files[1] or ""
---~ if name ~= "" then
---~ local command = "luatools --fmt=metatex " .. name
---~ logs.simple("running command: %s",command)
---~ os.spawn(command)
---~ end
---~ end
-
function scripts.metatex.run(ctxdata,filename)
local filename = environment.files[1] or ""
if filename ~= "" then
@@ -49,7 +38,7 @@ function scripts.metatex.timed(action)
statistics.timed(action)
end
-logs.extendbanner("MetaTeX Process Management 0.10",true)
+logs.extendbanner("MetaTeX Process Management 0.10")
messages.help = [[
--run process (one or more) files (default action)
diff --git a/scripts/context/lua/mtx-modules.lua b/scripts/context/lua/mtx-modules.lua
index 3a348593f..696e4767f 100644
--- a/scripts/context/lua/mtx-modules.lua
+++ b/scripts/context/lua/mtx-modules.lua
@@ -150,7 +150,7 @@ end
-- context --ctx=m-modules.ctx xxx.mkiv
-logs.extendbanner("ConTeXt Module Documentation Generators 1.00",true)
+logs.extendbanner("ConTeXt Module Documentation Generators 1.00")
messages.help = [[
--convert convert source files (tex, mkii, mkiv, mp) to 'ted' files
diff --git a/scripts/context/lua/mtx-mptopdf.lua b/scripts/context/lua/mtx-mptopdf.lua
index 342ff1c28..4662c2830 100644
--- a/scripts/context/lua/mtx-mptopdf.lua
+++ b/scripts/context/lua/mtx-mptopdf.lua
@@ -108,7 +108,7 @@ function scripts.mptopdf.convertall()
end
end
-logs.extendbanner("MetaPost to PDF Converter 0.51",true)
+logs.extendbanner("MetaPost to PDF Converter 0.51")
messages.help = [[
--rawmp raw metapost run
diff --git a/scripts/context/lua/mtx-package.lua b/scripts/context/lua/mtx-package.lua
index b36fc0ed8..d62a988b9 100644
--- a/scripts/context/lua/mtx-package.lua
+++ b/scripts/context/lua/mtx-package.lua
@@ -55,7 +55,7 @@ function scripts.package.merge_luatex_files(name,strip)
end
end
-logs.extendbanner("Distribution Related Goodies 0.10",true)
+logs.extendbanner("Distribution Related Goodies 0.10")
messages.help = [[
--merge merge 'loadmodule' into merge file
diff --git a/scripts/context/lua/mtx-patterns.lua b/scripts/context/lua/mtx-patterns.lua
index 293016991..c3817e9a8 100644
--- a/scripts/context/lua/mtx-patterns.lua
+++ b/scripts/context/lua/mtx-patterns.lua
@@ -6,81 +6,103 @@ if not modules then modules = { } end modules ['mtx-patterns'] = {
license = "see context related readme files"
}
-local format = string.format
+local format, find, concat = string.format, string.find, table.concat
scripts = scripts or { }
scripts.patterns = scripts.patterns or { }
scripts.patterns.list = {
- { "??", "hyph-ar.tex", "arabic" },
+ -- no patterns for arabic
+-- { "ar", "hyph-ar.tex", "arabic" },
+ -- not supported
+-- { "as", "hyph-as.tex", "assamese" },
{ "bg", "hyph-bg.tex", "bulgarian" },
+ -- not supported
+-- { "bn", "hyph-bn.tex", "bengali" },
{ "ca", "hyph-ca.tex", "catalan" },
- { "??", "hyph-cop.tex", "coptic" },
+ -- not supported
+-- { "cop", "hyph-cop.tex", "coptic" },
{ "cs", "hyph-cs.tex", "czech" },
{ "cy", "hyph-cy.tex", "welsh" },
{ "da", "hyph-da.tex", "danish" },
{ "deo", "hyph-de-1901.tex", "german, old spelling" },
{ "de", "hyph-de-1996.tex", "german, new spelling" },
+ { "??", "hyph-de-ch-1901.tex", "swiss german" },
--~ { "??", "hyph-el-monoton.tex", "" },
--~ { "??", "hyph-el-polyton.tex", "" },
- { "agr", "hyph-grc", "ancient greek" },
---~ { "???", "hyph-x-ibycus", "ancient greek in ibycus encoding" },
---~ { "gr", "", "" },
- { "eo", "hyph-eo.tex", "esperanto" },
+ { "agr", "hyph-grc.tex", "ancient greek" },
{ "gb", "hyph-en-gb.tex", "british english" },
{ "us", "hyph-en-us.tex", "american english" },
+--~ { "gr", "", "" },
+ -- these patterns do not satisfy the rules of 'clean patterns'
+-- { "eo", "hyph-eo.tex", "esperanto" },
{ "es", "hyph-es.tex", "spanish" },
{ "et", "hyph-et.tex", "estonian" },
- { "eu", "hyph-eu.tex", "basque" }, -- ba is Bashkir!
- { "fa", "hyph-fa.tex", "farsi" },
+ { "eu", "hyph-eu.tex", "basque" },
+ -- no patterns for farsi/persian
+-- { "fa", "hyph-fa.tex", "farsi" },
{ "fi", "hyph-fi.tex", "finnish" },
{ "fr", "hyph-fr.tex", "french" },
--- { "??", "hyph-ga.tex", "" },
--- { "??", "hyph-gl.tex", "" },
--- { "??", "hyph-grc.tex", "" },
+ { "??", "hyph-ga.tex", "irish" },
+ { "??", "hyph-gl.tex", "galician" },
+ -- not supported
+-- { "gu", "hyph-gu.tex", "gujarati" },
+ -- not supported
+-- { "hi", "hyph-hi.tex", "hindi" },
{ "hr", "hyph-hr.tex", "croatian" },
{ "??", "hyph-hsb.tex", "upper sorbian" },
{ "hu", "hyph-hu.tex", "hungarian" },
+ -- not supported
+-- { "hy", "hyph-hy.tex", "armenian" },
{ "??", "hyph-ia.tex", "interlingua" },
{ "??", "hyph-id.tex", "indonesian" },
{ "is", "hyph-is.tex", "icelandic" },
{ "it", "hyph-it.tex", "italian" },
+ { "??", "hyph-kmr.tex", "kurmanji" },
+ -- not supported
+-- { "kn", "hyph-kn.tex", "kannada" },
{ "la", "hyph-la.tex", "latin" },
+ -- not supported
+-- { "lo", "hyph-lo.tex", "lao" },
{ "lt", "hyph-lt.tex", "lithuanian" },
+ { "??", "hyph-lv.tex", "latvian" },
{ "mn", "hyph-mn-cyrl.tex", "mongolian, cyrillic script" },
{ "nb", "hyph-nb.tex", "norwegian bokmål" },
{ "nl", "hyph-nl.tex", "dutch" },
{ "nn", "hyph-nn.tex", "norwegian nynorsk" },
+ -- not supported
+-- { "or", "hyph-or.tex", "oriya" },
+ -- not supported
+-- { "pa", "hyph-pa.tex", "panjabi" },
+ -- not supported
+-- { "", "hyph-.tex", "" },
{ "pl", "hyph-pl.tex", "polish" },
{ "pt", "hyph-pt.tex", "portuguese" },
{ "ro", "hyph-ro.tex", "romanian" },
{ "ru", "hyph-ru.tex", "russian" },
+ -- not supported
+-- { "sa", "hyph-sa.tex", "sanskrit" },
{ "sk", "hyph-sk.tex", "slovak" },
{ "sl", "hyph-sl.tex", "slovenian" },
+ -- TODO: there is both Cyrillic and Latin script available
{ "sr", "hyph-sr-cyrl.tex", "serbian" },
{ "sv", "hyph-sv.tex", "swedish" },
+ -- not supported
+-- { "ta", "hyph-ta.tex", "tamil" },
+ -- not supported
+-- { "te", "hyph-te.tex", "telugu" },
+ { "tk", "hyph-tk.tex", "turkmen" },
{ "tr", "hyph-tr.tex", "turkish" },
- { "tk", "hyph-tk.tex", "turkman" },
{ "uk", "hyph-uk.tex", "ukrainian" },
{ "zh", "hyph-zh-latn.tex", "zh-latn, chinese Pinyin" },
}
-
-- stripped down from lpeg example:
local utf = unicode.utf8
-local cont = lpeg.R("\128\191") -- continuation byte
-
-local utf8 = lpeg.R("\0\127")
- + lpeg.R("\194\223") * cont
- + lpeg.R("\224\239") * cont * cont
- + lpeg.R("\240\244") * cont * cont * cont
-
-local validutf = (utf8^0/function() return true end) * (lpeg.P(-1)/function() return false end)
-
function utf.check(str)
- return lpeg.match(validutf,str)
+ return lpeg.match(lpeg.patterns.validutf8,str)
end
local permitted_commands = table.tohash {
@@ -174,8 +196,8 @@ function scripts.patterns.load(path,name,mnemonic,fullcheck)
data = data:gsub(" *[\n\r]+","\n")
local patterns = data:match("\\patterns[%s]*{[%s]*(.-)[%s]*}") or ""
local hyphenations = data:match("\\hyphenation[%s]*{[%s]*(.-)[%s]*}") or ""
- patterns = patterns:gsub(" +","\n")
- hyphenations = hyphenations:gsub(" +","\n")
+ patterns = patterns:gsub("[ \t]+","\n")
+ hyphenations = hyphenations:gsub("[ \t]+","\n")
local p, h = { }, { }
local pats, hyps = { } , { }
local pused, hused = { } , { }
@@ -262,27 +284,83 @@ function scripts.patterns.load(path,name,mnemonic,fullcheck)
end
end
-function scripts.patterns.save(destination,mnemonic,patterns,hyphenations,comment,stripped,pused,hused)
+function scripts.patterns.save(destination,mnemonic,name,patterns,hyphenations,comment,stripped,pused,hused)
local nofpatterns = #patterns
local nofhyphenations = #hyphenations
- local pu = table.concat(table.sortedkeys(pused), " ")
- local hu = table.concat(table.sortedkeys(hused), " ")
logs.simple("language %s has %s patterns and %s exceptions",mnemonic,nofpatterns,nofhyphenations)
if mnemonic ~= "??" then
+ local pu = concat(table.sortedkeys(pused), " ")
+ local hu = concat(table.sortedkeys(hused), " ")
+
local rmefile = file.join(destination,"lang-"..mnemonic..".rme")
local patfile = file.join(destination,"lang-"..mnemonic..".pat")
local hypfile = file.join(destination,"lang-"..mnemonic..".hyp")
+ local luafile = file.join(destination,"lang-"..mnemonic..".lua") -- suffix might change to llg
+
local topline = "% generated by mtxrun --script pattern --convert"
local banner = "% for comment and copyright, see " .. rmefile
logs.simple("saving language data for %s",mnemonic)
if not comment or comment == "" then comment = "% no comment" end
if not type(destination) == "string" then destination = "." end
+
+ local lines = string.splitlines(comment)
+ for i=1,#lines do
+ if not find(lines[i],"^%%") then
+ lines[i] = "% " .. lines[i]
+ end
+ end
+
+ local metadata = {
+ -- texcomment = comment,
+ texcomment = concat(lines,"\n"),
+ source = name,
+ mnemonic = mnemonic,
+ }
+
+ local patterndata, hyphenationdata
+ if nofpatterns > 0 then
+ patterndata = {
+ n = nofpatterns,
+ data = concat(patterns," ") or nil,
+ characters = concat(table.sortedkeys(pused),""),
+ minhyphenmin = 1, -- determined by pattern author
+ minhyphenmax = 1, -- determined by pattern author
+ }
+ else
+ patterndata = {
+ n = nofpatterns,
+ }
+ end
+ if nofhyphenations > 0 then
+ hyphenationdata = {
+ n = nofhyphenations,
+ data = concat(hyphenations," "),
+ characters = concat(table.sortedkeys(hused),""),
+ }
+ else
+ hyphenationdata = {
+ n = nofhyphenations,
+ }
+ end
+ local data = {
+ -- a prelude to language goodies, like we have font goodies and in
+ -- mkiv we can use this file directly
+ version = "1.001",
+ comment = topline,
+ metadata = metadata,
+ patterns = patterndata,
+ exceptions = hyphenationdata,
+ }
+
os.remove(rmefile)
os.remove(patfile)
os.remove(hypfile)
+ os.remove(luafile)
+
io.savedata(rmefile,format("%s\n\n%s",topline,comment))
- io.savedata(patfile,format("%s\n\n%s\n\n%% used: %s\n\n\\patterns{\n%s}",topline,banner,pu,table.concat(patterns,"\n")))
- io.savedata(hypfile,format("%s\n\n%s\n\n%% used: %s\n\n\\hyphenation{\n%s}",topline,banner,hu,table.concat(hyphenations,"\n")))
+ io.savedata(patfile,format("%s\n\n%s\n\n%% used: %s\n\n\\patterns{\n%s}",topline,banner,pu,concat(patterns,"\n")))
+ io.savedata(hypfile,format("%s\n\n%s\n\n%% used: %s\n\n\\hyphenation{\n%s}",topline,banner,hu,concat(hyphenations,"\n")))
+ io.savedata(luafile,table.serialize(data,true))
end
end
@@ -330,7 +408,7 @@ function scripts.patterns.convert()
logs.simple("converting language %s, file %s", mnemonic, name)
local okay, patterns, hyphenations, comment, stripped, pused, hused = scripts.patterns.load(path,name,false)
if okay then
- scripts.patterns.save(destination,mnemonic,patterns,hyphenations,comment,stripped,pused,hused)
+ scripts.patterns.save(destination,mnemonic,name,patterns,hyphenations,comment,stripped,pused,hused)
else
logs.simple("convertion aborted due to error(s)")
end
@@ -340,11 +418,13 @@ function scripts.patterns.convert()
end
end
-logs.extendbanner("ConTeXt Pattern File Management 0.20",true)
+logs.extendbanner("ConTeXt Pattern File Management 0.20")
messages.help = [[
--convert generate context language files (mnemonic driven, if not given then all)
--check check pattern file (or those used by context when no file given)
+--path source path where hyph-foo.tex files are stored
+--destination destination path
--fast only report filenames, no lines
]]
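
Besides the .rme/.pat/.hyp files, scripts.patterns.save now also serializes a lang-<mnemonic>.lua companion (the comment in the patch hints the suffix may later become llg). The generated table has roughly the shape sketched below; every concrete value is a placeholder, only the field names follow the code above:

    -- Invented example of a generated lang-nl.lua; all values are placeholders.
    return {
        version = "1.001",
        comment = "% generated by mtxrun --script pattern --convert",
        metadata = {
            mnemonic   = "nl",
            source     = "hyph-nl.tex",
            texcomment = "% copyright and licence lines taken from the source file",
        },
        patterns = {
            n            = 12345,            -- number of patterns (placeholder)
            data         = "space separated pattern strings",
            characters   = "abcdefghijklmnopqrstuvwxyz",  -- sorted characters used
            minhyphenmin = 1,                -- determined by pattern author
            minhyphenmax = 1,                -- determined by pattern author
        },
        exceptions = {
            n          = 67,                 -- number of exceptions (placeholder)
            data       = "space separated hyphenation exceptions",
            characters = "abcdefghijklmnopqrstuvwxyz-",
        },
    }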
diff --git a/scripts/context/lua/mtx-profile.lua b/scripts/context/lua/mtx-profile.lua
index 11d48d039..e9a69762b 100644
--- a/scripts/context/lua/mtx-profile.lua
+++ b/scripts/context/lua/mtx-profile.lua
@@ -154,7 +154,7 @@ end
--~ scripts.profiler.analyse("t:/manuals/mk/mk-fonts-profile.lua")
--~ scripts.profiler.analyse("t:/manuals/mk/mk-introduction-profile.lua")
-logs.extendbanner("ConTeXt MkIV LuaTeX Profiler 1.00",true)
+logs.extendbanner("ConTeXt MkIV LuaTeX Profiler 1.00")
messages.help = [[
--analyse analyse lua calls
diff --git a/scripts/context/lua/mtx-scite.lua b/scripts/context/lua/mtx-scite.lua
index d5f0a5344..4034599c0 100644
--- a/scripts/context/lua/mtx-scite.lua
+++ b/scripts/context/lua/mtx-scite.lua
@@ -150,7 +150,7 @@ function scripts.scite.start(indeed)
end
end
-logs.extendbanner("Scite Startup Script 1.00",true)
+logs.extendbanner("Scite Startup Script 1.00")
messages.help = [[
--start [--verbose] start scite
diff --git a/scripts/context/lua/mtx-texworks.lua b/scripts/context/lua/mtx-texworks.lua
index 73ab846cd..47a949b6c 100644
--- a/scripts/context/lua/mtx-texworks.lua
+++ b/scripts/context/lua/mtx-texworks.lua
@@ -83,7 +83,7 @@ function scripts.texworks.start(indeed)
end
end
-logs.extendbanner("TeXworks Startup Script 1.00",true)
+logs.extendbanner("TeXworks Startup Script 1.00")
messages.help = [[
--start [--verbose] start texworks
diff --git a/scripts/context/lua/mtx-tools.lua b/scripts/context/lua/mtx-tools.lua
index bf4add168..50b35c847 100644
--- a/scripts/context/lua/mtx-tools.lua
+++ b/scripts/context/lua/mtx-tools.lua
@@ -144,7 +144,7 @@ function scripts.tools.dirtoxml()
end
-logs.extendbanner("Some File Related Goodies 1.01",true)
+logs.extendbanner("Some File Related Goodies 1.01")
messages.help = [[
--disarmutfbomb remove utf bomb if present
diff --git a/scripts/context/lua/mtx-update.lua b/scripts/context/lua/mtx-update.lua
index b56083d38..6552215bb 100644
--- a/scripts/context/lua/mtx-update.lua
+++ b/scripts/context/lua/mtx-update.lua
@@ -144,6 +144,11 @@ scripts.update.platforms = {
["solaris"] = "solaris-sparc",
}
+scripts.update.selfscripts = {
+ "mtxrun",
+ -- "luatools",
+}
+
-- the list is filled up later (when we know what modules to download)
scripts.update.modules = {
@@ -197,6 +202,7 @@ function scripts.update.synchronize()
dir.mkdirs(format("%s/%s", texroot, "texmf-cache"))
dir.mkdirs(format("%s/%s", texroot, "texmf-local"))
dir.mkdirs(format("%s/%s", texroot, "texmf-project"))
+ dir.mkdirs(format("%s/%s", texroot, "texmf-fonts"))
end
if ok or not force then
@@ -330,14 +336,12 @@ function scripts.update.synchronize()
end
end
end
- if logs.verbose then
- for k, v in next, combined do
- logs.report("update", k)
- for i=1,#v do
- logs.report("update", " <= " .. v[i])
- end
- end
- end
+ --~ for k, v in next, combined do
+ --~ logs.report("update", k)
+ --~ for i=1,#v do
+ --~ logs.report("update", " <= " .. v[i])
+ --~ end
+ --~ end
for destination, archive in next, combined do
local archives, command = concat(archive," "), ""
-- local normalflags, deleteflags = states.get("rsync.flags.normal"), states.get("rsync.flags.delete")
@@ -381,8 +385,9 @@ function scripts.update.synchronize()
end
for platform, _ in next, platforms do
- update_script('luatools',platform)
- update_script('mtxrun',platform)
+ for i=1, #scripts.update.selfscripts do
+ update_script(scripts.update.selfscripts[i],platform)
+ end
end
else
@@ -397,7 +402,7 @@ function scripts.update.synchronize()
-- update filename database for pdftex/xetex
scripts.update.run("mktexlsr")
-- update filename database for luatex
- scripts.update.run("luatools --generate")
+ scripts.update.run(format('mtxrun --tree="%s" --generate',texroot))
logs.report("update","done")
end
@@ -425,7 +430,7 @@ function scripts.update.make()
resolvers.load_tree(texroot)
scripts.update.run("mktexlsr")
- scripts.update.run("luatools --generate")
+ scripts.update.run(format('mtxrun --tree="%s" --generate',texroot))
local askedformats = formats
local texformats = table.tohash(scripts.update.texformats)
@@ -444,7 +449,7 @@ function scripts.update.make()
if formatlist ~= "" then
for engine in next, engines do
if engine == "luatex" then
- scripts.update.run(format("context --make")) -- maybe also formatlist
+ scripts.update.run(format('mtxrun --tree="%s" --script context --autogenerate --make',texroot))
else
-- todo: just handle make here or in mtxrun --script context --make
scripts.update.run(format("texexec --make --all --fast --%s %s",engine,formatlist))
@@ -459,11 +464,11 @@ function scripts.update.make()
logs.report("make", "use --force to really make formats")
end
scripts.update.run("mktexlsr")
- scripts.update.run("luatools --generate")
+ scripts.update.run(format('mtxrun --tree="%s" --generate',texroot))
logs.report("make","done")
end
-logs.extendbanner("ConTeXt Minimals Updater 0.21",true)
+logs.extendbanner("ConTeXt Minimals Updater 0.21")
messages.help = [[
--platform=string platform (windows, linux, linux-64, osx-intel, osx-ppc, linux-ppc)
diff --git a/scripts/context/lua/mtx-watch.lua b/scripts/context/lua/mtx-watch.lua
index 10f01cf86..617d73f90 100644
--- a/scripts/context/lua/mtx-watch.lua
+++ b/scripts/context/lua/mtx-watch.lua
@@ -356,7 +356,7 @@ function scripts.watch.cleanup_stale_files() -- removes duplicates
end
end
-logs.extendbanner("ConTeXt Request Watchdog 1.00",true)
+logs.extendbanner("ConTeXt Request Watchdog 1.00")
messages.help = [[
--logpath optional path for log files
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index b99327692..46db66493 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -38,8 +38,6 @@ if not modules then modules = { } end modules ['mtxrun'] = {
-- remember for subruns: _CTX_K_S_#{original}_
-- remember for subruns: TEXMFSTART.#{original} [tex.rb texmfstart.rb]
-texlua = true
-
-- begin library merge
@@ -97,13 +95,6 @@ function string:unquote()
return (gsub(self,"^([\"\'])(.*)%1$","%2"))
end
---~ function string:unquote()
---~ if find(self,"^[\'\"]") then
---~ return sub(self,2,-2)
---~ else
---~ return self
---~ end
---~ end
function string:quote() -- we could use format("%q")
return format("%q",self)
@@ -126,11 +117,6 @@ function string:limit(n,sentinel)
end
end
---~ function string:strip() -- the .- is quite efficient
---~ -- return match(self,"^%s*(.-)%s*$") or ""
---~ -- return match(self,'^%s*(.*%S)') or '' -- posted on lua list
---~ return find(s,'^%s*$') and '' or match(s,'^%s*(.*%S)')
---~ end
do -- roberto's variant:
local space = lpeg.S(" \t\v\n")
@@ -217,13 +203,6 @@ function is_number(str) -- tonumber
return find(str,"^[%-%+]?[%d]-%.?[%d+]$") == 1
end
---~ print(is_number("1"))
---~ print(is_number("1.1"))
---~ print(is_number(".1"))
---~ print(is_number("-0.1"))
---~ print(is_number("+0.1"))
---~ print(is_number("-.1"))
---~ print(is_number("+.1"))
function string:split_settings() -- no {} handling, see l-aux for lpeg variant
if find(self,"=") then
@@ -278,18 +257,6 @@ function string:totable()
return lpegmatch(pattern,self)
end
---~ local t = {
---~ "1234567123456712345671234567",
---~ "a\tb\tc",
---~ "aa\tbb\tcc",
---~ "aaa\tbbb\tccc",
---~ "aaaa\tbbbb\tcccc",
---~ "aaaaa\tbbbbb\tccccc",
---~ "aaaaaa\tbbbbbb\tcccccc",
---~ }
---~ for k,v do
---~ print(string.tabtospace(t[k]))
---~ end
function string.tabtospace(str,tab)
-- we don't handle embedded newlines
@@ -390,6 +357,11 @@ patterns.whitespace = patterns.eol + patterns.spacer
patterns.nonwhitespace = 1 - patterns.whitespace
patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191')
+patterns.validutf8 = patterns.utf8^0 * P(-1) * Cc(true) + Cc(false)
+
+patterns.undouble = P('"')/"" * (1-P('"'))^0 * P('"')/""
+patterns.unsingle = P("'")/"" * (1-P("'"))^0 * P("'")/""
+patterns.unspacer = ((patterns.spacer^1)/"")^0
function lpeg.anywhere(pattern) --slightly adapted from website
return P { P(pattern) + 1 * V(1) } -- why so complex?
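
The lpeg.patterns.validutf8 added here is what the reworked utf.check in mtx-patterns (earlier in this patch) now relies on: it yields true when the whole string is well-formed UTF-8 and false otherwise (undouble, unsingle and unspacer are quote- and space-stripping building blocks added alongside it). A standalone sketch using the same byte ranges the old inline check in mtx-patterns used:

    -- Self-contained check for valid UTF-8, mirroring patterns.validutf8.
    local lpeg = require("lpeg")
    local P, R, Cc = lpeg.P, lpeg.R, lpeg.Cc

    local cont      = R("\128\191")          -- continuation byte
    local utf8one   = R("\0\127")
    local utf8two   = R("\194\223") * cont
    local utf8three = R("\224\239") * cont * cont
    local utf8four  = R("\240\244") * cont * cont * cont
    local utf8      = utf8one + utf8two + utf8three + utf8four

    local validutf8 = utf8^0 * P(-1) * Cc(true) + Cc(false)

    print(lpeg.match(validutf8, "hyphenation"))   -- true
    print(lpeg.match(validutf8, "caf\195\169"))   -- true  (utf-8 encoded é)
    print(lpeg.match(validutf8, "caf\233"))       -- false (latin-1 é)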
@@ -412,10 +384,6 @@ end
patterns.textline = content
---~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more
---~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more
---~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps
---~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps
local splitters_s, splitters_m = { }, { }
@@ -484,19 +452,7 @@ function string:checkedsplit(separator)
return match(c,self)
end
---~ function lpeg.append(list,pp)
---~ local p = pp
---~ for l=1,#list do
---~ if p then
---~ p = p + P(list[l])
---~ else
---~ p = P(list[l])
---~ end
---~ end
---~ return p
---~ end
---~ from roberto's site:
local f1 = string.byte
@@ -506,6 +462,53 @@ local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 6
patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+local cache = { }
+
+function lpeg.stripper(str)
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+end
+
+function lpeg.replacer(t)
+ if #t > 0 then
+ local p
+ for i=1,#t do
+ local ti= t[i]
+ local pp = P(ti[1]) / ti[2]
+ p = (p and p + pp ) or pp
+ end
+ return Cs((p + 1)^0)
+ end
+end
+
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(P(1)^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+
end -- of closure
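A minimal sketch of the new lpeg helpers in use (the sample strings are illustrative; assumes the merged code above is loaded):

local lpegmatch = lpeg.match
local strip = lpeg.stripper(" \t")                           -- drop all spaces and tabs
print(lpegmatch(strip, "a b\tc"))                            -- "abc"
local repl = lpeg.replacer { { "foo", "bar" }, { "1", "2" } }
print(lpegmatch(repl, "foo1foo"))                            -- "bar2bar"
print(lpegmatch(lpeg.firstofsplit("="), "key=value"))        -- "key"
print(lpegmatch(lpeg.secondofsplit("="), "key=value"))       -- "value"
print(lpegmatch(lpeg.secondofsplit("="), "plainkey"))        -- nil (no separator present)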
@@ -783,9 +786,6 @@ function table.one_entry(t) -- obsolete, use inline code instead
return n and not next(t,n)
end
---~ function table.starts_at(t) -- obsolete, not nice anyway
---~ return ipairs(t,1)(t,0)
---~ end
function table.tohash(t,value)
local h = { }
@@ -806,12 +806,6 @@ function table.fromhash(t)
return h
end
---~ print(table.serialize(t), "\n")
---~ print(table.serialize(t,"name"), "\n")
---~ print(table.serialize(t,false), "\n")
---~ print(table.serialize(t,true), "\n")
---~ print(table.serialize(t,"name",true), "\n")
---~ print(table.serialize(t,"name",true,true), "\n")
table.serialize_functions = true
table.serialize_compact = true
@@ -871,8 +865,7 @@ local function do_serialize(root,name,depth,level,indexed)
if indexed then
handle(format("%s{",depth))
elseif name then
- --~ handle(format("%s%s={",depth,key(name)))
- if type(name) == "number" then -- or find(k,"^%d+$") then
+ if type(name) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s[0x%04X]={",depth,name))
else
@@ -901,10 +894,8 @@ local function do_serialize(root,name,depth,level,indexed)
for i=1,#sk do
local k = sk[i]
local v = root[k]
- --~ if v == root then
- -- circular
- --~ else
- local t = type(v)
+ -- circular
+ local t = type(v)
if compact and first and type(k) == "number" and k >= first and k <= last then
if t == "number" then
if hexify then
@@ -947,12 +938,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s __p__=nil,",depth))
end
elseif t == "number" then
- --~ if hexify then
- --~ handle(format("%s %s=0x%04X,",depth,key(k),v))
- --~ else
- --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g
- --~ end
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
else
@@ -973,8 +959,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "string" then
if reduce and tonumber(v) then
- --~ handle(format("%s %s=%s,",depth,key(k),v))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%s,",depth,k,v))
else
@@ -986,8 +971,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%q]=%s,",depth,k,v))
end
else
- --~ handle(format("%s %s=%q,",depth,key(k),v))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%q,",depth,k,v))
else
@@ -1001,8 +985,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "table" then
if not next(v) then
- --~ handle(format("%s %s={},",depth,key(k)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]={},",depth,k))
else
@@ -1016,8 +999,7 @@ local function do_serialize(root,name,depth,level,indexed)
elseif inline then
local st = simple_table(v)
if st then
- --~ handle(format("%s %s={ %s },",depth,key(k),concat(st,", ")))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
else
@@ -1035,8 +1017,7 @@ local function do_serialize(root,name,depth,level,indexed)
do_serialize(v,k,depth,level+1)
end
elseif t == "boolean" then
- --~ handle(format("%s %s=%s,",depth,key(k),tostring(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
else
@@ -1049,8 +1030,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
- --~ handle(format('%s %s=loadstring(%q),',depth,key(k),dump(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
else
@@ -1063,8 +1043,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
end
else
- --~ handle(format("%s %s=%q,",depth,key(k),tostring(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
else
@@ -1076,8 +1055,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%q]=%q,",depth,k,tostring(v)))
end
end
- --~ end
- end
+ end
end
if level > 0 then
handle(format("%s},",depth))
@@ -1118,19 +1096,11 @@ local function serialize(root,name,_handle,_reduce,_noquotes,_hexify)
handle("t={")
end
if root and next(root) then
- do_serialize(root,name,"",0,indexed)
+ do_serialize(root,name,"",0)
end
handle("}")
end
---~ name:
---~
---~ true : return { }
---~ false : { }
---~ nil : t = { }
---~ string : string = { }
---~ 'return' : return { }
---~ number : [number] = { }
function table.serialize(root,name,reduce,noquotes,hexify)
local t = { }
@@ -1353,9 +1323,6 @@ function table.swapped(t)
return s
end
---~ function table.are_equal(a,b)
---~ return table.serialize(a) == table.serialize(b)
---~ end
function table.clone(t,p) -- t is optional or nil or table
if not p then
@@ -1421,6 +1388,17 @@ function table.insert_after_value(t,value,extra)
insert(t,#t+1,extra)
end
+function table.sequenced(t,sep)
+ local s = { }
+ for k, v in next, t do -- indexed?
+ s[#s+1] = k .. "=" .. tostring(v)
+ end
+ return concat(s, sep or " | ")
+end
+
+function table.print(...)
+ print(table.serialize(...))
+end
end -- of closure
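A short sketch of the two new table helpers (the output order of sequenced follows next, so it is not fixed):

local t = { alpha = 1, beta = true }
print(table.sequenced(t))          -- e.g. "alpha=1 | beta=true"
print(table.sequenced(t, ", "))    -- same pairs, comma separated
table.print { "a", "b", n = 2 }    -- prints the table.serialize dump of the table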
@@ -1756,17 +1734,6 @@ function set.contains(n,s)
end
end
---~ local c = set.create{'aap','noot','mies'}
---~ local s = set.tonumber(c)
---~ local t = set.totable(s)
---~ print(t['aap'])
---~ local c = set.create{'zus','wim','jet'}
---~ local s = set.tonumber(c)
---~ local t = set.totable(s)
---~ print(t['aap'])
---~ print(t['jet'])
---~ print(set.contains(t,'jet'))
---~ print(set.contains(t,'aap'))
@@ -1784,29 +1751,97 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
-local find, format, gsub = string.find, string.format, string.gsub
+local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
local random, ceil = math.random, math.ceil
+local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
+
+-- The following code permits traversing the environment table, at least
+-- in luatex. Internally all environment names are uppercase.
+
+if not os.__getenv__ then
+
+ os.__getenv__ = os.getenv
+ os.__setenv__ = os.setenv
+
+ if os.env then
-local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush
+ local osgetenv = os.getenv
+ local ossetenv = os.setenv
+ local osenv = os.env local _ = osenv.PATH -- initialize the table
+
+ function os.setenv(k,v)
+ if v == nil then
+ v = ""
+ end
+ local K = upper(k)
+ osenv[K] = v
+ ossetenv(K,v)
+ end
+
+ function os.getenv(k)
+ local K = upper(k)
+ local v = osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
+ if v == "" then
+ return nil
+ else
+ return v
+ end
+ end
+
+ else
+
+ local ossetenv = os.setenv
+ local osgetenv = os.getenv
+ local osenv = { }
+
+ function os.setenv(k,v)
+ if v == nil then
+ v = ""
+ end
+ local K = upper(k)
+ osenv[K] = v
+ end
+
+ function os.getenv(k)
+ local K = upper(k)
+ local v = osenv[K] or osgetenv(K) or osgetenv(k)
+ if v == "" then
+ return nil
+ else
+ return v
+ end
+ end
+
+ local function __index(t,k)
+ return os.getenv(k)
+ end
+ local function __newindex(t,k,v)
+ os.setenv(k,v)
+ end
+
+ os.env = { }
+
+ setmetatable(os.env, { __index = __index, __newindex = __newindex } )
+
+ end
+
+end
+
+-- end of environment hack
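A small sketch of the wrapped environment access (assumes a luatex or texlua run where os.setenv is available; the variable name is made up):

os.setenv("MtxTest", "value")
print(os.getenv("MTXTEST"))   -- "value": names are stored uppercase internally
print(os.getenv("mtxtest"))   -- "value" as well, the lookup is case insensitive
for k, v in next, os.env do   -- traversable table (in luatex the real os.env is used)
    -- print(k, v)
end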
+
+local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush
function os.execute(...) ioflush() return execute(...) end
function os.spawn (...) ioflush() return spawn (...) end
function os.exec (...) ioflush() return exec (...) end
+function io.popen (...) ioflush() return iopopen(...) end
function os.resultof(command)
- ioflush() -- else messed up logging
local handle = io.popen(command,"r")
- if not handle then
- -- print("unknown command '".. command .. "' in os.resultof")
- return ""
- else
- return handle:read("*all") or ""
- end
+ return handle and handle:read("*all") or ""
end
---~ os.type : windows | unix (new, we already guessed os.platform)
---~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
---~ os.platform : extended os.name with architecture
if not io.fileseparator then
if find(os.getenv("PATH"),";") then
@@ -1856,11 +1891,6 @@ function os.runtime()
return os.gettimeofday() - startuptime
end
---~ print(os.gettimeofday()-os.time())
---~ os.sleep(1.234)
---~ print (">>",os.runtime())
---~ print(os.date("%H:%M:%S",os.gettimeofday()))
---~ print(os.date("%H:%M:%S",os.time()))
-- no need for function anymore as we have more clever code and helpers now
-- this metatable trickery might as well disappear
@@ -1878,24 +1908,6 @@ end
setmetatable(os,osmt)
-if not os.setenv then
-
- -- we still store them but they won't be seen in
- -- child processes although we might pass them some day
- -- using command concatenation
-
- local env, getenv = { }, os.getenv
-
- function os.setenv(k,v)
- env[k] = v
- end
-
- function os.getenv(k)
- return env[k] or getenv(k)
- end
-
-end
-
-- we can use HOSTTYPE on some platforms
local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or ""
@@ -2016,7 +2028,7 @@ elseif name == "kfreebsd" then
-- we sometimes have HOSTTYPE set so let's check that first
local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
if find(architecture,"x86_64") then
- platform = "kfreebsd-64"
+ platform = "kfreebsd-amd64"
else
platform = "kfreebsd-i386"
end
@@ -2093,59 +2105,81 @@ if not modules then modules = { } end modules ['l-file'] = {
file = file or { }
-local concat = table.concat
+local insert, concat = table.insert, table.concat
local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char
local lpegmatch = lpeg.match
+local getcurrentdir = lfs.currentdir
-function file.removesuffix(filename)
- return (gsub(filename,"%.[%a%d]+$",""))
+local function dirname(name,default)
+ return match(name,"^(.+)[/\\].-$") or (default or "")
end
-function file.addsuffix(filename, suffix)
- if not suffix or suffix == "" then
- return filename
- elseif not find(filename,"%.[%a%d]+$") then
- return filename .. "." .. suffix
- else
- return filename
- end
+local function basename(name)
+ return match(name,"^.+[/\\](.-)$") or name
end
-function file.replacesuffix(filename, suffix)
- return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+local function nameonly(name)
+ return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
end
-function file.dirname(name,default)
- return match(name,"^(.+)[/\\].-$") or (default or "")
+local function extname(name,default)
+ return match(name,"^.+%.([^/\\]-)$") or default or ""
end
-function file.basename(name)
- return match(name,"^.+[/\\](.-)$") or name
+local function splitname(name)
+ local n, s = match(name,"^(.+)%.([^/\\]-)$")
+ return n or name, s or ""
end
-function file.nameonly(name)
- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
+file.basename = basename
+file.dirname = dirname
+file.nameonly = nameonly
+file.extname = extname
+file.suffix = extname
+
+function file.removesuffix(filename)
+ return (gsub(filename,"%.[%a%d]+$",""))
end
-function file.extname(name,default)
- return match(name,"^.+%.([^/\\]-)$") or default or ""
+function file.addsuffix(filename, suffix, criterium)
+ if not suffix or suffix == "" then
+ return filename
+ elseif criterium == true then
+ return filename .. "." .. suffix
+ elseif not criterium then
+ local n, s = splitname(filename)
+ if not s or s == "" then
+ return filename .. "." .. suffix
+ else
+ return filename
+ end
+ else
+ local n, s = splitname(filename)
+ if s and s ~= "" then
+ local t = type(criterium)
+ if t == "table" then
+ -- keep if in criterium
+ for i=1,#criterium do
+ if s == criterium[i] then
+ return filename
+ end
+ end
+ elseif t == "string" then
+ -- keep if criterium
+ if s == criterium then
+ return filename
+ end
+ end
+ end
+ return n .. "." .. suffix
+ end
end
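The extended addsuffix behaviour in short (a sketch; the file names are illustrative):

print(file.addsuffix("name", "tex"))                       -- "name.tex"
print(file.addsuffix("name.pdf", "tex"))                   -- "name.pdf"     (a suffix is already present)
print(file.addsuffix("name.pdf", "tex", true))             -- "name.pdf.tex" (criterium true forces the suffix)
print(file.addsuffix("name.lua", "tex", "lua"))            -- "name.lua"     (kept, the suffix matches the criterium)
print(file.addsuffix("name.old", "tex", { "lua", "luc" })) -- "name.tex"     (replaced, not in the list)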
-file.suffix = file.extname
---~ function file.join(...)
---~ local pth = concat({...},"/")
---~ pth = gsub(pth,"\\","/")
---~ local a, b = match(pth,"^(.*://)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ a, b = match(pth,"^(//)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ return (gsub(pth,"//+","/"))
---~ end
+function file.replacesuffix(filename, suffix)
+ return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+end
+
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
@@ -2173,18 +2207,9 @@ function file.join(...)
return (gsub(pth,"//+","/"))
end
---~ print(file.join("//","/y"))
---~ print(file.join("/","/y"))
---~ print(file.join("","/y"))
---~ print(file.join("/x/","/y"))
---~ print(file.join("x/","/y"))
---~ print(file.join("http://","/y"))
---~ print(file.join("http://a","/y"))
---~ print(file.join("http:///a","/y"))
---~ print(file.join("//nas-1","/y"))
function file.iswritable(name)
- local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,"."))
+ local a = lfs.attributes(name) or lfs.attributes(dirname(name,"."))
return a and sub(a.permissions,2,2) == "w"
end
@@ -2198,17 +2223,6 @@ file.is_writable = file.iswritable
-- todo: lpeg
---~ function file.split_path(str)
---~ local t = { }
---~ str = gsub(str,"\\", "/")
---~ str = gsub(str,"(%a):([;/])", "%1\001%2")
---~ for name in gmatch(str,"([^;:]+)") do
---~ if name ~= "" then
---~ t[#t+1] = gsub(name,"\001",":")
---~ end
---~ end
---~ return t
---~ end
local checkedsplit = string.checkedsplit
@@ -2223,31 +2237,62 @@ end
-- we can hash them weakly
-function file.collapse_path(str)
+
+function file.collapse_path(str,anchor)
+ if anchor and not find(str,"^/") and not find(str,"^%a:") then
+ str = getcurrentdir() .. "/" .. str
+ end
+ if str == "" or str =="." then
+ return "."
+ elseif find(str,"^%.%.") then
+ str = gsub(str,"\\","/")
+ return str
+ elseif not find(str,"%.") then
+ str = gsub(str,"\\","/")
+ return str
+ end
str = gsub(str,"\\","/")
- if find(str,"/") then
- str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified
- str = gsub(str,"/%./","/")
- local n, m = 1, 1
- while n > 0 or m > 0 do
- str, n = gsub(str,"[^/%.]+/%.%.$","")
- str, m = gsub(str,"[^/%.]+/%.%./","")
+ local starter, rest = match(str,"^(%a+:/*)(.-)$")
+ if starter then
+ str = rest
+ end
+ local oldelements = checkedsplit(str,"/")
+ local newelements = { }
+ local i = #oldelements
+ while i > 0 do
+ local element = oldelements[i]
+ if element == '.' then
+ -- do nothing
+ elseif element == '..' then
+ local n = i -1
+ while n > 0 do
+ local element = oldelements[n]
+ if element ~= '..' and element ~= '.' then
+ oldelements[n] = '.'
+ break
+ else
+ n = n - 1
+ end
+ end
+ if n < 1 then
+ insert(newelements,1,'..')
+ end
+ elseif element ~= "" then
+ insert(newelements,1,element)
end
- str = gsub(str,"([^/])/$","%1")
- -- str = gsub(str,"^%./","") -- ./xx in qualified
- str = gsub(str,"/%.$","")
+ i = i - 1
+ end
+ if #newelements == 0 then
+ return starter or "."
+ elseif starter then
+ return starter .. concat(newelements, '/')
+ elseif find(str,"^/") then
+ return "/" .. concat(newelements,'/')
+ else
+ return concat(newelements, '/')
end
- if str == "" then str = "." end
- return str
end
---~ print(file.collapse_path("/a"))
---~ print(file.collapse_path("a/./b/.."))
---~ print(file.collapse_path("a/aa/../b/bb"))
---~ print(file.collapse_path("a/../.."))
---~ print(file.collapse_path("a/.././././b/.."))
---~ print(file.collapse_path("a/./././b/.."))
---~ print(file.collapse_path("a/b/c/../.."))
function file.robustname(str)
return (gsub(str,"[^%a%d%/%-%.\\]+","-"))
@@ -2262,92 +2307,23 @@ end
-- lpeg variants, slightly faster, not always
---~ local period = lpeg.P(".")
---~ local slashes = lpeg.S("\\/")
---~ local noperiod = 1-period
---~ local noslashes = 1-slashes
---~ local name = noperiod^1
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1
-
---~ function file.extname(name)
---~ return lpegmatch(pattern,name) or ""
---~ end
-
---~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1)
-
---~ function file.removesuffix(name)
---~ return lpegmatch(pattern,name)
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1
-
---~ function file.basename(name)
---~ return lpegmatch(pattern,name) or name
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1
-
---~ function file.dirname(name)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2)
---~ else
---~ return ""
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
-
---~ function file.addsuffix(name, suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return name
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
-
---~ function file.replacesuffix(name,suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2) .. "." .. suffix
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1
-
---~ function file.nameonly(name)
---~ local a, b = lpegmatch(pattern,name)
---~ if b then
---~ return sub(name,a,b-2)
---~ elseif a then
---~ return sub(name,a)
---~ else
---~ return name
---~ end
---~ end
-
---~ local test = file.extname
---~ local test = file.basename
---~ local test = file.dirname
---~ local test = file.addsuffix
---~ local test = file.replacesuffix
---~ local test = file.nameonly
-
---~ print(1,test("./a/b/c/abd.def.xxx","!!!"))
---~ print(2,test("./../b/c/abd.def.xxx","!!!"))
---~ print(3,test("a/b/c/abd.def.xxx","!!!"))
---~ print(4,test("a/b/c/def.xxx","!!!"))
---~ print(5,test("a/b/c/def","!!!"))
---~ print(6,test("def","!!!"))
---~ print(7,test("def.xxx","!!!"))
-
---~ local tim = os.clock() for i=1,250000 do local ext = test("abd.def.xxx","!!!") end print(os.clock()-tim)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-- also rewrite previous
@@ -2387,14 +2363,6 @@ end
-- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
-- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
---~ -- todo:
---~
---~ if os.type == "windows" then
---~ local currentdir = lfs.currentdir
---~ function lfs.currentdir()
---~ return (gsub(currentdir(),"\\","/"))
---~ end
---~ end
end -- of closure
@@ -2420,18 +2388,6 @@ if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
---~ if not md5.HEX then
---~ local function remap(chr) return format("%02X",byte(chr)) end
---~ function md5.HEX(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
---~ if not md5.hex then
---~ local function remap(chr) return format("%02x",byte(chr)) end
---~ function md5.hex(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
---~ if not md5.dec then
---~ local function remap(chr) return format("%03i",byte(chr)) end
---~ function md5.dec(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
file.needs_updating_threshold = 1
@@ -2487,9 +2443,10 @@ if not modules then modules = { } end modules ['l-url'] = {
license = "see context related readme files"
}
-local char, gmatch, gsub = string.char, string.gmatch, string.gsub
+local char, gmatch, gsub, format, byte = string.char, string.gmatch, string.gsub, string.format, string.byte
+local concat = table.concat
local tonumber, type = tonumber, type
-local lpegmatch = lpeg.match
+local lpegmatch, lpegP, lpegC, lpegR, lpegS, lpegCs, lpegCc = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc
-- from the spec (on the web):
--
@@ -2507,22 +2464,35 @@ local function tochar(s)
return char(tonumber(s,16))
end
-local colon, qmark, hash, slash, percent, endofstring = lpeg.P(":"), lpeg.P("?"), lpeg.P("#"), lpeg.P("/"), lpeg.P("%"), lpeg.P(-1)
+local colon, qmark, hash, slash, percent, endofstring = lpegP(":"), lpegP("?"), lpegP("#"), lpegP("/"), lpegP("%"), lpegP(-1)
-local hexdigit = lpeg.R("09","AF","af")
-local plus = lpeg.P("+")
-local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar)
+local hexdigit = lpegR("09","AF","af")
+local plus = lpegP("+")
+local nothing = lpegCc("")
+local escaped = (plus / " ") + (percent * lpegC(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("")
-local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("")
-local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("")
-local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("")
-local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0) + lpeg.Cc("")
+local scheme = lpegCs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
+local authority = slash * slash * lpegCs((escaped+(1- slash-qmark-hash))^0) + nothing
+local path = slash * lpegCs((escaped+(1- qmark-hash))^0) + nothing
+local query = qmark * lpegCs((escaped+(1- hash))^0) + nothing
+local fragment = hash * lpegCs((escaped+(1- endofstring))^0) + nothing
local parser = lpeg.Ct(scheme * authority * path * query * fragment)
+lpeg.patterns.urlsplitter = parser
+
+local escapes = { }
+
+for i=0,255 do
+ escapes[i] = format("%%%02X",i)
+end
+
+local escaper = lpeg.Cs((lpegR("09","AZ","az") + lpegS("-./_") + lpegP(1) / escapes)^0)
+
+lpeg.patterns.urlescaper = escaper
+
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
@@ -2535,15 +2505,27 @@ end
function url.hashed(str)
local s = url.split(str)
local somescheme = s[1] ~= ""
- return {
- scheme = (somescheme and s[1]) or "file",
- authority = s[2],
- path = s[3],
- query = s[4],
- fragment = s[5],
- original = str,
- noscheme = not somescheme,
- }
+ if not somescheme then
+ return {
+ scheme = "file",
+ authority = "",
+ path = str,
+ query = "",
+ fragment = "",
+ original = str,
+ noscheme = true,
+ }
+ else
+ return {
+ scheme = s[1],
+ authority = s[2],
+ path = s[3],
+ query = s[4],
+ fragment = s[5],
+ original = str,
+ noscheme = false,
+ }
+ end
end
function url.hasscheme(str)
@@ -2554,15 +2536,25 @@ function url.addscheme(str,scheme)
return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str)
end
-function url.construct(hash)
- local fullurl = hash.sheme .. "://".. hash.authority .. hash.path
- if hash.query then
- fullurl = fullurl .. "?".. hash.query
+function url.construct(hash) -- todo: we need to escape !
+ local fullurl = { }
+ local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
+ if scheme and scheme ~= "" then
+ fullurl[#fullurl+1] = scheme .. "://"
+ end
+ if authority and authority ~= "" then
+ fullurl[#fullurl+1] = authority
end
- if hash.fragment then
- fullurl = fullurl .. "?".. hash.fragment
+ if path and path ~= "" then
+ fullurl[#fullurl+1] = "/" .. path
end
- return fullurl
+ if query and query ~= "" then
+ fullurl[#fullurl+1] = "?".. query
+ end
+ if fragment and fragment ~= "" then
+ fullurl[#fullurl+1] = "#".. fragment
+ end
+ return lpegmatch(escaper,concat(fullurl))
end
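A sketch of the reworked hashing (the zip url is just an example):

local h = url.hashed("zip:///archive.zip?name/file.tex")
-- h.scheme == "zip", h.authority == "", h.path == "archive.zip",
-- h.query == "name/file.tex", h.fragment == "", h.noscheme == false
local f = url.hashed("/etc/passwd")
-- no scheme found: f.scheme == "file", f.path == "/etc/passwd", f.noscheme == true
-- url.construct(h) concatenates the non empty fields and runs the result
-- through lpeg.patterns.urlescaper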
function url.filename(filename)
@@ -2582,37 +2574,12 @@ function url.query(str)
end
end
---~ print(url.filename("file:///c:/oeps.txt"))
---~ print(url.filename("c:/oeps.txt"))
---~ print(url.filename("file:///oeps.txt"))
---~ print(url.filename("file:///etc/test.txt"))
---~ print(url.filename("/oeps.txt"))
-
---~ from the spec on the web (sort of):
---~
---~ function test(str)
---~ print(table.serialize(url.hashed(str)))
---~ end
---~
---~ test("%56pass%20words")
---~ test("file:///c:/oeps.txt")
---~ test("file:///c|/oeps.txt")
---~ test("file:///etc/oeps.txt")
---~ test("file://./etc/oeps.txt")
---~ test("file:////etc/oeps.txt")
---~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt")
---~ test("http://www.ietf.org/rfc/rfc2396.txt")
---~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what")
---~ test("mailto:John.Doe@example.com")
---~ test("news:comp.infosystems.www.servers.unix")
---~ test("tel:+1-816-555-1212")
---~ test("telnet://192.0.2.16:80/")
---~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2")
---~ test("/etc/passwords")
---~ test("http://www.pragma-ade.com/spaced%20name")
-
---~ test("zip:///oeps/oeps.zip#bla/bla.tex")
---~ test("zip:///oeps/oeps.zip?bla/bla.tex")
+
+
+
+
+
+
end -- of closure
@@ -2767,11 +2734,6 @@ end
dir.glob = glob
---~ list = dir.glob("**/*.tif")
---~ list = dir.glob("/**/*.tif")
---~ list = dir.glob("./**/*.tif")
---~ list = dir.glob("oeps/**/*.tif")
---~ list = dir.glob("/oeps/**/*.tif")
local function globfiles(path,recurse,func,files) -- func == pattern or function
if type(func) == "string" then
@@ -2815,10 +2777,6 @@ function dir.ls(pattern)
return table.concat(glob(pattern),"\n")
end
---~ mkdirs("temp")
---~ mkdirs("a/b/c")
---~ mkdirs(".","/a/b/c")
---~ mkdirs("a","b","c")
local make_indeed = true -- false
@@ -2878,17 +2836,6 @@ if string.find(os.getenv("PATH"),";") then -- os.type == "windows"
return pth, (lfs.isdir(pth) == true)
end
---~ print(dir.mkdirs("","","a","c"))
---~ print(dir.mkdirs("a"))
---~ print(dir.mkdirs("a:"))
---~ print(dir.mkdirs("a:/b/c"))
---~ print(dir.mkdirs("a:b/c"))
---~ print(dir.mkdirs("a:/bbb/c"))
---~ print(dir.mkdirs("/a/b/c"))
---~ print(dir.mkdirs("/aaa/b/c"))
---~ print(dir.mkdirs("//a/b/c"))
---~ print(dir.mkdirs("///a/b/c"))
---~ print(dir.mkdirs("a/bbb//ccc/"))
function dir.expand_name(str) -- will be merged with cleanpath and collapsepath
local first, nothing, last = match(str,"^(//)(//*)(.*)$")
@@ -2928,7 +2875,7 @@ else
local str, pth, t = "", "", { ... }
for i=1,#t do
local s = t[i]
- if s ~= "" then
+ if s and s ~= "" then -- we catch nil and false
if str ~= "" then
str = str .. "/" .. s
else
@@ -2962,13 +2909,6 @@ else
return pth, (lfs.isdir(pth) == true)
end
---~ print(dir.mkdirs("","","a","c"))
---~ print(dir.mkdirs("a"))
---~ print(dir.mkdirs("/a/b/c"))
---~ print(dir.mkdirs("/aaa/b/c"))
---~ print(dir.mkdirs("//a/b/c"))
---~ print(dir.mkdirs("///a/b/c"))
---~ print(dir.mkdirs("a/bbb//ccc/"))
function dir.expand_name(str) -- will be merged with cleanpath and collapsepath
if not find(str,"^/") then
@@ -3025,7 +2965,7 @@ function toboolean(str,tolerant)
end
end
-function string.is_boolean(str)
+function string.is_boolean(str,default)
if type(str) == "string" then
if str == "true" or str == "yes" or str == "on" or str == "t" then
return true
@@ -3033,7 +2973,7 @@ function string.is_boolean(str)
return false
end
end
- return nil
+ return default
end
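The new default argument in short (a sketch):

print(string.is_boolean("yes"))          -- true
print(string.is_boolean("no"))           -- false
print(string.is_boolean("maybe"))        -- nil, no default given
print(string.is_boolean("maybe", true))  -- true, the default is returned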
function boolean.alwaystrue()
@@ -3049,6 +2989,211 @@ end -- of closure
do -- create closure to overcome 200 locals limit
+if not modules then modules = { } end modules ['l-unicode'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not unicode then
+
+ unicode = { utf8 = { } }
+
+ local floor, char = math.floor, string.char
+
+ function unicode.utf8.utfchar(n)
+ if n < 0x80 then
+ return char(n)
+ elseif n < 0x800 then
+ return char(0xC0 + floor(n/0x40)) .. char(0x80 + (n % 0x40))
+ elseif n < 0x10000 then
+ return char(0xE0 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40))
+ elseif n < 0x40000 then
+ return char(0xF0 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40))
+ else -- wrong:
+ -- return char(0xF1 + floor(n/0x1000000)) .. char(0x80 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40))
+ return "?"
+ end
+ end
+
+end
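The fallback encoder at work (a sketch; only relevant when the code runs outside luatex, so the branch above is active):

local utfchar = unicode.utf8.utfchar
print(utfchar(0x41))    -- "A"      (one byte)
print(utfchar(0xE9))    -- C3 A9    (two bytes, é)
print(utfchar(0x20AC))  -- E2 82 AC (three bytes, €)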
+
+utf = utf or unicode.utf8
+
+local concat, utfchar, utfgsub = table.concat, utf.char, utf.gsub
+local char, byte, find, bytepairs = string.char, string.byte, string.find, string.bytepairs
+
+-- 0 EF BB BF UTF-8
+-- 1 FF FE UTF-16-little-endian
+-- 2 FE FF UTF-16-big-endian
+-- 3 FF FE 00 00 UTF-32-little-endian
+-- 4 00 00 FE FF UTF-32-big-endian
+
+unicode.utfname = {
+ [0] = 'utf-8',
+ [1] = 'utf-16-le',
+ [2] = 'utf-16-be',
+ [3] = 'utf-32-le',
+ [4] = 'utf-32-be'
+}
+
+-- \000 fails in <= 5.0 but is valid in >=5.1 where %z is deprecated
+
+function unicode.utftype(f)
+ local str = f:read(4)
+ if not str then
+ f:seek('set')
+ return 0
+ -- elseif find(str,"^%z%z\254\255") then -- deprecated
+ -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged
+ elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH)
+ return 4
+ -- elseif find(str,"^\255\254%z%z") then -- deprecated
+ -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged
+ elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH)
+ return 3
+ elseif find(str,"^\254\255") then
+ f:seek('set',2)
+ return 2
+ elseif find(str,"^\255\254") then
+ f:seek('set',2)
+ return 1
+ elseif find(str,"^\239\187\191") then
+ f:seek('set',3)
+ return 0
+ else
+ f:seek('set')
+ return 0
+ end
+end
+
+function unicode.utf16_to_utf8(str, endian) -- maybe a gsub is faster or an lpeg
+ local result, tmp, n, m, p = { }, { }, 0, 0, 0
+ -- lf | cr | crlf / (cr:13, lf:10)
+ local function doit()
+ if n == 10 then
+ if p ~= 13 then
+ result[#result+1] = concat(tmp)
+ tmp = { }
+ p = 0
+ end
+ elseif n == 13 then
+ result[#result+1] = concat(tmp)
+ tmp = { }
+ p = n
+ else
+ tmp[#tmp+1] = utfchar(n)
+ p = 0
+ end
+ end
+ for l,r in bytepairs(str) do
+ if r then
+ if endian then
+ n = l*256 + r
+ else
+ n = r*256 + l
+ end
+ if m > 0 then
+ n = (m-0xD800)*0x400 + (n-0xDC00) + 0x10000
+ m = 0
+ doit()
+ elseif n >= 0xD800 and n <= 0xDBFF then
+ m = n
+ else
+ doit()
+ end
+ end
+ end
+ if #tmp > 0 then
+ result[#result+1] = concat(tmp)
+ end
+ return result
+end
+
+function unicode.utf32_to_utf8(str, endian)
+ local result = { }
+ local tmp, n, m, p = { }, 0, -1, 0
+ -- lf | cr | crlf / (cr:13, lf:10)
+ local function doit()
+ if n == 10 then
+ if p ~= 13 then
+ result[#result+1] = concat(tmp)
+ tmp = { }
+ p = 0
+ end
+ elseif n == 13 then
+ result[#result+1] = concat(tmp)
+ tmp = { }
+ p = n
+ else
+ tmp[#tmp+1] = utfchar(n)
+ p = 0
+ end
+ end
+ for a,b in bytepairs(str) do
+ if a and b then
+ if m < 0 then
+ if endian then
+ m = a*256*256*256 + b*256*256
+ else
+ m = b*256 + a
+ end
+ else
+ if endian then
+ n = m + a*256 + b
+ else
+ n = m + b*256*256*256 + a*256*256
+ end
+ m = -1
+ doit()
+ end
+ else
+ break
+ end
+ end
+ if #tmp > 0 then
+ result[#result+1] = concat(tmp)
+ end
+ return result
+end
+
+local function little(c)
+ local b = byte(c) -- b = c:byte()
+ if b < 0x10000 then
+ return char(b%256,b/256)
+ else
+ b = b - 0x10000
+ local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
+ return char(b1%256,b1/256,b2%256,b2/256)
+ end
+end
+
+local function big(c)
+ local b = byte(c)
+ if b < 0x10000 then
+ return char(b/256,b%256)
+ else
+ b = b - 0x10000
+ local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
+ return char(b1/256,b1%256,b2/256,b2%256)
+ end
+end
+
+function unicode.utf8_to_utf16(str,littleendian)
+ if littleendian then
+ return char(255,254) .. utfgsub(str,".",little)
+ else
+ return char(254,255) .. utfgsub(str,".",big)
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
if not modules then modules = { } end modules ['l-math'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -3106,7 +3251,7 @@ if not modules then modules = { } end modules ['l-utils'] = {
-- hm, quite unreadable
-local gsub = string.gsub
+local gsub, format = string.gsub, string.format
local concat = table.concat
local type, next = type, next
@@ -3114,81 +3259,79 @@ if not utils then utils = { } end
if not utils.merger then utils.merger = { } end
if not utils.lua then utils.lua = { } end
-utils.merger.m_begin = "begin library merge"
-utils.merger.m_end = "end library merge"
-utils.merger.pattern =
+utils.report = utils.report or print
+
+local merger = utils.merger
+
+merger.strip_comment = true
+
+local m_begin_merge = "begin library merge"
+local m_end_merge = "end library merge"
+local m_begin_closure = "do -- create closure to overcome 200 locals limit"
+local m_end_closure = "end -- of closure"
+
+local m_pattern =
"%c+" ..
- "%-%-%s+" .. utils.merger.m_begin ..
+ "%-%-%s+" .. m_begin_merge ..
"%c+(.-)%c+" ..
- "%-%-%s+" .. utils.merger.m_end ..
+ "%-%-%s+" .. m_end_merge ..
"%c+"
-function utils.merger._self_fake_()
- return
- "-- " .. "created merged file" .. "\n\n" ..
- "-- " .. utils.merger.m_begin .. "\n\n" ..
- "-- " .. utils.merger.m_end .. "\n\n"
-end
+local m_format =
+ "\n\n-- " .. m_begin_merge ..
+ "\n%s\n" ..
+ "-- " .. m_end_merge .. "\n\n"
-function utils.report(...)
- print(...)
+local m_faked =
+ "-- " .. "created merged file" .. "\n\n" ..
+ "-- " .. m_begin_merge .. "\n\n" ..
+ "-- " .. m_end_merge .. "\n\n"
+
+local function self_fake()
+ return m_faked
end
-utils.merger.strip_comment = true
+local function self_nothing()
+ return ""
+end
-function utils.merger._self_load_(name)
- local f, data = io.open(name), ""
- if f then
- utils.report("reading merge from %s",name)
- data = f:read("*all")
- f:close()
+local function self_load(name)
+ local data = io.loaddata(name) or ""
+ if data == "" then
+ utils.report("merge: unknown file %s",name)
else
- utils.report("unknown file to merge %s",name)
- end
- if data and utils.merger.strip_comment then
- -- saves some 20K
- data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "")
+ utils.report("merge: inserting %s",name)
end
return data or ""
end
-function utils.merger._self_save_(name, data)
+local function self_save(name, data)
if data ~= "" then
- local f = io.open(name,'w')
- if f then
- utils.report("saving merge from %s",name)
- f:write(data)
- f:close()
+ if merger.strip_comment then
+ -- saves some 20K
+ local n = #data
+ data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
+ utils.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data)
end
+ io.savedata(name,data)
+ utils.report("merge: saving %s",name)
end
end
-function utils.merger._self_swap_(data,code)
- if data ~= "" then
- return (gsub(data,utils.merger.pattern, function(s)
- return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end .. "\n\n"
- end, 1))
- else
- return ""
- end
+local function self_swap(data,code)
+ return data ~= "" and (gsub(data,m_pattern, function() return format(m_format,code) end, 1)) or ""
end
---~ stripper:
---~
---~ data = gsub(data,"%-%-~[^\n]*\n","")
---~ data = gsub(data,"\n\n+","\n")
-
-function utils.merger._self_libs_(libs,list)
- local result, f, frozen = { }, nil, false
+local function self_libs(libs,list)
+ local result, f, frozen, foundpath = { }, nil, false, nil
result[#result+1] = "\n"
if type(libs) == 'string' then libs = { libs } end
if type(list) == 'string' then list = { list } end
- local foundpath = nil
for i=1,#libs do
local lib = libs[i]
for j=1,#list do
local pth = gsub(list[j],"\\","/") -- file.clean_path
- utils.report("checking library path %s",pth)
+ utils.report("merge: checking library path %s",pth)
local name = pth .. "/" .. lib
if lfs.isfile(name) then
foundpath = pth
@@ -3197,76 +3340,58 @@ function utils.merger._self_libs_(libs,list)
if foundpath then break end
end
if foundpath then
- utils.report("using library path %s",foundpath)
+ utils.report("merge: using library path %s",foundpath)
local right, wrong = { }, { }
for i=1,#libs do
local lib = libs[i]
local fullname = foundpath .. "/" .. lib
if lfs.isfile(fullname) then
- -- right[#right+1] = lib
- utils.report("merging library %s",fullname)
- result[#result+1] = "do -- create closure to overcome 200 locals limit"
+ utils.report("merge: using library %s",fullname)
+ right[#right+1] = lib
+ result[#result+1] = m_begin_closure
result[#result+1] = io.loaddata(fullname,true)
- result[#result+1] = "end -- of closure"
+ result[#result+1] = m_end_closure
else
- -- wrong[#wrong+1] = lib
- utils.report("no library %s",fullname)
+ utils.report("merge: skipping library %s",fullname)
+ wrong[#wrong+1] = lib
end
end
if #right > 0 then
- utils.report("merged libraries: %s",concat(right," "))
+ utils.report("merge: used libraries: %s",concat(right," "))
end
if #wrong > 0 then
- utils.report("skipped libraries: %s",concat(wrong," "))
+ utils.report("merge: skipped libraries: %s",concat(wrong," "))
end
else
- utils.report("no valid library path found")
+ utils.report("merge: no valid library path found")
end
return concat(result, "\n\n")
end
-function utils.merger.selfcreate(libs,list,target)
+function merger.selfcreate(libs,list,target)
if target then
- utils.merger._self_save_(
- target,
- utils.merger._self_swap_(
- utils.merger._self_fake_(),
- utils.merger._self_libs_(libs,list)
- )
- )
- end
-end
-
-function utils.merger.selfmerge(name,libs,list,target)
- utils.merger._self_save_(
- target or name,
- utils.merger._self_swap_(
- utils.merger._self_load_(name),
- utils.merger._self_libs_(libs,list)
- )
- )
+ self_save(target,self_swap(self_fake(),self_libs(libs,list)))
+ end
end
-function utils.merger.selfclean(name)
- utils.merger._self_save_(
- name,
- utils.merger._self_swap_(
- utils.merger._self_load_(name),
- ""
- )
- )
+function merger.selfmerge(name,libs,list,target)
+ self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
end
-function utils.lua.compile(luafile, lucfile, cleanup, strip) -- defaults: cleanup=false strip=true
- -- utils.report("compiling",luafile,"into",lucfile)
+function merger.selfclean(name)
+ self_save(name,self_swap(self_load(name),self_nothing()))
+end
+
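A sketch of how the renamed merger entry points are meant to be called (the file and path names are illustrative, not the actual build setup):

-- merge the listed libraries into a copy of the stub (the stub must contain the merge markers)
utils.merger.selfmerge("mtxrun.lua", { "l-string.lua", "l-lpeg.lua" }, { "tex/context/base" }, "mtxrun-merged.lua")
-- strip the merged section again
utils.merger.selfclean("mtxrun-merged.lua")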
+function utils.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+ utils.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quote(lucfile) .. " " .. string.quote(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = (os.spawn("texluac " .. command) == 0) or (os.spawn("luac " .. command) == 0)
+ local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- -- utils.report("removing",luafile)
+ utils.report("lua: removing %s",luafile)
os.remove(luafile)
end
return done
@@ -3350,11 +3475,7 @@ end
function aux.settings_to_hash(str,existing)
if str and str ~= "" then
hash = existing or { }
- if moretolerant then
- lpegmatch(pattern_b_s,str)
- else
- lpegmatch(pattern_a_s,str)
- end
+ lpegmatch(pattern_a_s,str)
return hash
else
return { }
@@ -3484,12 +3605,6 @@ local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
local number = digit^1 * (case_1 + case_2)
local stripper = lpeg.Cs((number + 1)^0)
---~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100"
---~ collectgarbage("collect")
---~ str = string.rep(sample,10000)
---~ local ts = os.clock()
---~ lpegmatch(stripper,str)
---~ print(#str, os.clock()-ts, lpegmatch(stripper,sample))
lpeg.patterns.strip_zeros = stripper
@@ -3518,235 +3633,305 @@ function aux.accesstable(target)
return t
end
---~ function string.commaseparated(str)
---~ return gmatch(str,"([^,%s]+)")
---~ end
-- as we use this a lot ...
---~ function aux.cachefunction(action,weak)
---~ local cache = { }
---~ if weak then
---~ setmetatable(cache, { __mode = "kv" } )
---~ end
---~ local function reminder(str)
---~ local found = cache[str]
---~ if not found then
---~ found = action(str)
---~ cache[str] = found
---~ end
---~ return found
---~ end
---~ return reminder, cache
---~ end
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-tra'] = {
+if not modules then modules = { } end modules ['trac-inf'] = {
version = 1.001,
- comment = "companion to trac-tra.mkiv",
+ comment = "companion to trac-inf.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
--- the <anonymous> tag is kind of generic and used for functions that are not
--- bound to a variable, like node.new, node.copy etc (contrary to for instance
--- node.has_attribute which is bound to a has_attribute local variable in mkiv)
+-- As we want to protect the global tables, we no longer store the timing
+-- in the tables themselves but in a hidden timers table so that we don't
+-- get warnings about assignments. This is more efficient than using rawset
+-- and rawget.
-local debug = require "debug"
+local format = string.format
+local clock = os.gettimeofday or os.clock -- should go in environment
-local getinfo = debug.getinfo
-local type, next = type, next
-local concat = table.concat
-local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub
+local statusinfo, n, registered = { }, 0, { }
-debugger = debugger or { }
+statistics = statistics or { }
-local counters = { }
-local names = { }
+statistics.enable = true
+statistics.threshold = 0.05
--- one
+local timers = { }
-local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
- if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
+local function hastiming(instance)
+ return instance and timers[instance]
end
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
- else
- -- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+
+local function resettiming(instance)
+ timers[instance or "notimer"] = { timing = 0, loadtime = 0 }
+end
+
+local function starttiming(instance)
+ local timer = timers[instance or "notimer"]
+ if not timer then
+ timer = { }
+ timers[instance or "notimer"] = timer
+ end
+ local it = timer.timing
+ if not it then
+ it = 0
+ end
+ if it == 0 then
+ timer.starttime = clock()
+ if not timer.loadtime then
+ timer.loadtime = 0
end
- else
- return "unknown"
end
+ timer.timing = it + 1
end
-function debugger.showstats(printer,threshold)
- printer = printer or texio.write or print
- threshold = threshold or 0
- local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
+
+local function stoptiming(instance, report)
+ local timer = timers[instance or "notimer"]
+ local it = timer.timing
+ if it > 1 then
+ timer.timing = it - 1
+ else
+ local starttime = timer.starttime
+ if starttime then
+ local stoptime = clock()
+ local loadtime = stoptime - starttime
+ timer.stoptime = stoptime
+ timer.loadtime = timer.loadtime + loadtime
+ if report then
+ statistics.report("load time %0.3f",loadtime)
end
+ timer.timing = 0
+ return loadtime
end
- grandtotal = grandtotal + count
- functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ return 0
end
--- two
-
---~ local function hook()
---~ local n = getinfo(2)
---~ if n.what=="C" and not n.name then
---~ local f = tostring(debug.traceback())
---~ local cf = counters[f]
---~ if cf == nil then
---~ counters[f] = 1
---~ names[f] = n
---~ else
---~ counters[f] = cf + 1
---~ end
---~ end
---~ end
---~ function debugger.showstats(printer,threshold)
---~ printer = printer or texio.write or print
---~ threshold = threshold or 0
---~ local total, grandtotal, functions = 0, 0, 0
---~ printer("\n") -- ugly but ok
---~ -- table.sort(counters)
---~ for func, count in next, counters do
---~ if count > threshold then
---~ printer(format("%8i %s", count, func))
---~ total = total + count
---~ end
---~ grandtotal = grandtotal + count
---~ functions = functions + 1
---~ end
---~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
---~ end
+local function elapsedtime(instance)
+ local timer = timers[instance or "notimer"]
+ return format("%0.3f",timer and timer.loadtime or 0)
+end
--- rest
+local function elapsedindeed(instance)
+ local timer = timers[instance or "notimer"]
+ return (timer and timer.loadtime or 0) > statistics.threshold
+end
-function debugger.savestats(filename,threshold)
- local f = io.open(filename,'w')
- if f then
- debugger.showstats(function(str) f:write(str) end,threshold)
- f:close()
+local function elapsedseconds(instance,rest) -- returns nil if 0 seconds
+ if elapsedindeed(instance) then
+ return format("%s seconds %s", elapsedtime(instance),rest or "")
end
end
-function debugger.enable()
- debug.sethook(hook,"c")
+statistics.hastiming = hastiming
+statistics.resettiming = resettiming
+statistics.starttiming = starttiming
+statistics.stoptiming = stoptiming
+statistics.elapsedtime = elapsedtime
+statistics.elapsedindeed = elapsedindeed
+statistics.elapsedseconds = elapsedseconds
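Typical use of the timer interface (a sketch; "myrun" is just a label):

statistics.starttiming("myrun")
-- ... work to be measured ...
statistics.stoptiming("myrun")
print(statistics.elapsedtime("myrun"))              -- e.g. "0.123"
print(statistics.elapsedseconds("myrun", "(run)"))  -- "0.123 seconds (run)", or nil below the 0.05 threshold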
+
+-- general function
+
+function statistics.register(tag,fnc)
+ if statistics.enable and type(fnc) == "function" then
+ local rt = registered[tag] or (#statusinfo + 1)
+ statusinfo[rt] = { tag, fnc }
+ registered[tag] = rt
+ if #tag > n then n = #tag end
+ end
end
-function debugger.disable()
- debug.sethook()
---~ counters[debug.getinfo(2,"f").func] = nil
+function statistics.show(reporter)
+ if statistics.enable then
+ if not reporter then reporter = function(tag,data,n) texio.write_nl(tag .. " " .. data) end end
+ -- this code will move
+ local register = statistics.register
+ register("luatex banner", function()
+ return string.lower(status.banner)
+ end)
+ register("control sequences", function()
+ return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ end)
+ register("callbacks", function()
+ local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
+ return format("direct: %s, indirect: %s, total: %s", total-indirect, indirect, total)
+ end)
+ register("current memory usage", statistics.memused)
+ register("runtime",statistics.runtime)
+ for i=1,#statusinfo do
+ local s = statusinfo[i]
+ local r = s[2]()
+ if r then
+ reporter(s[1],r,n)
+ end
+ end
+ texio.write_nl("") -- final newline
+ statistics.enable = false
+ end
end
-function debugger.tracing()
- local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0
- if n > 0 then
- function debugger.tracing() return true end ; return true
+function statistics.show_job_stat(tag,data,n)
+ if type(data) == "table" then
+ for i=1,#data do
+ statistics.show_job_stat(tag,data[i],n)
+ end
else
- function debugger.tracing() return false end ; return false
+ texio.write_nl(format("%-15s: %s - %s","mkiv lua stats",tag:rpadd(n," "),data))
end
end
---~ debugger.enable()
+function statistics.memused() -- no math.round yet -)
+ local round = math.round or math.floor
+ return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
+end
+
+starttiming(statistics)
+
+function statistics.formatruntime(runtime) -- indirect so it can be overloaded and
+ return format("%s seconds", runtime) -- indeed that happens in cure-uti.lua
+end
+
+function statistics.runtime()
+ stoptiming(statistics)
+ return statistics.formatruntime(elapsedtime(statistics))
+end
+
+function statistics.timed(action,report)
+ report = report or logs.simple
+ starttiming("run")
+ action()
+ stoptiming("run")
+ report("total runtime: %s",elapsedtime("run"))
+end
+
+-- where, not really the best spot for this:
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
+commands = commands or { }
+
+function commands.resettimer(name)
+ resettiming(name or "whatever")
+ starttiming(name or "whatever")
+end
---~ debugger.disable()
+function commands.elapsedtime(name)
+ stoptiming(name or "whatever")
+ tex.sprint(elapsedtime(name or "whatever"))
+end
---~ print("")
---~ debugger.showstats()
---~ print("")
---~ debugger.showstats(print,3)
-setters = setters or { }
-setters.data = setters.data or { }
+end -- of closure
---~ local function set(t,what,value)
---~ local data, done = t.data, t.done
---~ if type(what) == "string" then
---~ what = aux.settings_to_array(what) -- inefficient but ok
---~ end
---~ for i=1,#what do
---~ local w = what[i]
---~ for d, f in next, data do
---~ if done[d] then
---~ -- prevent recursion due to wildcards
---~ elseif find(d,w) then
---~ done[d] = true
---~ for i=1,#f do
---~ f[i](value)
---~ end
---~ end
---~ end
---~ end
---~ end
+do -- create closure to overcome 200 locals limit
-local function set(t,what,value)
+if not modules then modules = { } end modules ['trac-set'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, next, tostring = type, next, tostring
+local concat = table.concat
+local format, find, lower, gsub = string.format, string.find, string.lower, string.gsub
+local is_boolean = string.is_boolean
+
+setters = { }
+
+local data = { } -- maybe just local
+
+-- We can initialize from the cnf file. This is sort of tricky as
+-- later defined setters also need to be initialized then. If set
+-- this way, we need to ensure that they are not reset later on.
+
+local trace_initialize = false
+
+local function report(what,filename,name,key,value)
+ texio.write_nl(format("%s setter, filename: %s, name: %s, key: %s, value: %s",what,filename,name,key,value))
+end
+
+function setters.initialize(filename,name,values) -- filename only for diagnostics
+ local data = data[name]
+ if data then
+ data = data.data
+ if data then
+ for key, value in next, values do
+ key = gsub(key,"_",".")
+ value = is_boolean(value,value)
+ local functions = data[key]
+ if functions then
+ if #functions > 0 and not functions.value then
+ if trace_initialize then
+ report("doing",filename,name,key,value)
+ end
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value = value
+ else
+ if trace_initialize then
+ report("skipping",filename,name,key,value)
+ end
+ end
+ else
+ -- we do a simple preregistration i.e. not in the
+ -- list as it might be an obsolete entry
+ functions = { default = value }
+ data[key] = functions
+ if trace_initialize then
+ report("storing",filename,name,key,value)
+ end
+ end
+ end
+ end
+ end
+end
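How a cnf based initialization maps onto registered setters (a sketch; the file name and key are made up, and the target setter, here directives, must already have been created with setters.new):

setters.initialize("texmfcnf.lua", "directives", {
    system_nostatistics = "yes", -- the key becomes "system.nostatistics", the value becomes true
})

If the key is already registered, the associated functions are called right away; otherwise the value is kept as a default for a later register.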
+
+-- user interface code
+
+local function set(t,what,newvalue)
local data, done = t.data, t.done
if type(what) == "string" then
what = aux.settings_to_hash(what) -- inefficient but ok
end
- for w, v in next, what do
- if v == "" then
- v = value
+ for w, value in next, what do
+ if value == "" then
+ value = newvalue
+ elseif not value then
+ value = false -- catch nil
else
- v = toboolean(v)
+ value = is_boolean(value,value)
end
- for d, f in next, data do
- if done[d] then
+ for name, functions in next, data do
+ if done[name] then
-- prevent recursion due to wildcards
- elseif find(d,w) then
- done[d] = true
- for i=1,#f do
- f[i](v)
+ elseif find(name,w) then
+ done[name] = true
+ for i=1,#functions do
+ functions[i](value)
end
+ functions.value = value
end
end
end
end
local function reset(t)
- for d, f in next, t.data do
- for i=1,#f do
- f[i](false)
+ for name, functions in next, t.data do
+ for i=1,#functions do
+ functions[i](false)
end
+ functions.value = false
end
end
@@ -3767,17 +3952,26 @@ end
function setters.register(t,what,...)
local data = t.data
what = lower(what)
- local w = data[what]
- if not w then
- w = { }
- data[what] = w
+ local functions = data[what]
+ if not functions then
+ functions = { }
+ data[what] = functions
end
+ local default = functions.default -- can be set from cnf file
for _, fnc in next, { ... } do
local typ = type(fnc)
- if typ == "function" then
- w[#w+1] = fnc
- elseif typ == "string" then
- w[#w+1] = function(value) set(t,fnc,value,nesting) end
+ if typ == "string" then
+ local s = fnc -- else wrong reference
+ fnc = function(value) set(t,s,value) end
+ elseif typ ~= "function" then
+ fnc = nil
+ end
+ if fnc then
+ functions[#functions+1] = fnc
+ if default then
+ fnc(default)
+ functions.value = default
+ end
end
end
end
@@ -3818,8 +4012,16 @@ end
function setters.show(t)
commands.writestatus("","")
local list = setters.list(t)
+ local category = t.name
for k=1,#list do
- commands.writestatus(t.name,list[k])
+ local name = list[k]
+ local functions = t.data[name]
+ if functions then
+ local value, default, modules = functions.value, functions.default, #functions
+ value = value == nil and "unset" or tostring(value)
+ default = default == nil and "unset" or tostring(default)
+ commands.writestatus(category,format("%-25s modules: %2i default: %5s value: %5s",name,modules,default,value))
+ end
end
commands.writestatus("","")
end
@@ -3832,7 +4034,7 @@ end
function setters.new(name)
local t
t = {
- data = { },
+ data = { }, -- indexed, but also default and value fields
name = name,
enable = function(...) setters.enable (t,...) end,
disable = function(...) setters.disable (t,...) end,
@@ -3840,7 +4042,7 @@ function setters.new(name)
list = function(...) setters.list (t,...) end,
show = function(...) setters.show (t,...) end,
}
- setters.data[name] = t
+ data[name] = t
return t
end
@@ -3858,12 +4060,12 @@ local e = directives.enable
local d = directives.disable
function directives.enable(...)
- commands.writestatus("directives","enabling: %s",concat({...}," "))
+ (commands.writestatus or logs.report)("directives","enabling: %s",concat({...}," "))
e(...)
end
function directives.disable(...)
- commands.writestatus("directives","disabling: %s",concat({...}," "))
+ (commands.writestatus or logs.report)("directives","disabling: %s",concat({...}," "))
d(...)
end
@@ -3871,12 +4073,12 @@ local e = experiments.enable
local d = experiments.disable
function experiments.enable(...)
- commands.writestatus("experiments","enabling: %s",concat({...}," "))
+ (commands.writestatus or logs.report)("experiments","enabling: %s",concat({...}," "))
e(...)
end
function experiments.disable(...)
- commands.writestatus("experiments","disabling: %s",concat({...}," "))
+ (commands.writestatus or logs.report)("experiments","disabling: %s",concat({...}," "))
d(...)
end
@@ -3887,6 +4089,946 @@ directives.register("system.nostatistics", function(v)
end)
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-tra'] = {
+ version = 1.001,
+ comment = "companion to trac-tra.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- the <anonymous> tag is kind of generic and used for functions that are not
+-- bound to a variable, like node.new, node.copy etc (as opposed to, for instance,
+-- node.has_attribute which is bound to a has_attribute local variable in mkiv)
+
+local debug = require "debug"
+
+local getinfo = debug.getinfo
+local type, next = type, next
+local format, find = string.format, string.find
+local is_boolean = string.is_boolean
+
+debugger = debugger or { }
+
+local counters = { }
+local names = { }
+
+-- one
+
+local function hook()
+ local f = getinfo(2,"f").func
+ local n = getinfo(2,"Sn")
+-- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ if f then
+ local cf = counters[f]
+ if cf == nil then
+ counters[f] = 1
+ names[f] = n
+ else
+ counters[f] = cf + 1
+ end
+ end
+end
+
+local function getname(func)
+ local n = names[func]
+ if n then
+ if n.what == "C" then
+ return n.name or '<anonymous>'
+ else
+ -- source short_src linedefined what name namewhat nups func
+ local name = n.name or n.namewhat or n.what
+ if not name or name == "" then name = "?" end
+ return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ end
+ else
+ return "unknown"
+ end
+end
+
+function debugger.showstats(printer,threshold)
+ printer = printer or texio.write or print
+ threshold = threshold or 0
+ local total, grandtotal, functions = 0, 0, 0
+ printer("\n") -- ugly but ok
+ -- table.sort(counters)
+ for func, count in next, counters do
+ if count > threshold then
+ local name = getname(func)
+ if not find(name,"for generator") then
+ printer(format("%8i %s", count, name))
+ total = total + count
+ end
+ end
+ grandtotal = grandtotal + count
+ functions = functions + 1
+ end
+ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+end
+
+-- two
+
+
+-- rest
+
+function debugger.savestats(filename,threshold)
+ local f = io.open(filename,'w')
+ if f then
+ debugger.showstats(function(str) f:write(str) end,threshold)
+ f:close()
+ end
+end
+
+function debugger.enable()
+ debug.sethook(hook,"c")
+end
+
+function debugger.disable()
+ debug.sethook()
+end
+
+local function trace_calls(n)
+ debugger.enable()
+ luatex.register_stop_actions(function()
+ debugger.disable()
+ debugger.savestats(tex.jobname .. "-luacalls.log",tonumber(n))
+ end)
+ trace_calls = function() end
+end
+
+if directives then
+ directives.register("system.tracecalls", function(n) trace_calls(n) end) -- indirect is needed for nilling
+end
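-- Editor's note (not part of this patch): a small sketch of driving the call
-- counter above by hand instead of through the "system.tracecalls" directive;
-- the work() function is made up for illustration.

local function work()
    for i=1,1000 do
        tostring(i) -- some C function we expect to show up in the statistics
    end
end

debugger.enable()             -- installs the "call" hook defined above
work()
debugger.disable()
debugger.showstats(print,100) -- only list functions called more than 100 times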
+
+
+
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-log'] = {
+ version = 1.001,
+ comment = "companion to trac-log.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- xml logging is only useful in normal runs, not in ini mode
+-- it looks like some tex logging (like filenames) is broken (no longer
+-- intercepted at the tex end so the xml variant is not that usable now)
+
+
+local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
+local format, gmatch = string.format, string.gmatch
+local texcount = tex and tex.count
+
+--[[ldx--
+<p>This is a prelude to a more extensive logging module. For the sake
+of parsing log files, in addition to the standard logging we will
+provide an <l n='xml'/> structured file. Actually, any logging that
+is hooked into callbacks will be \XML\ by default.</p>
+--ldx]]--
+
+logs = logs or { }
+
+--[[ldx--
+<p>This looks pretty ugly but we need to speed things up a bit.</p>
+--ldx]]--
+
+local moreinfo = [[
+More information about ConTeXt and the tools that come with it can be found at:
+
+maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
+wiki : http://contextgarden.net
+]]
+
+local functions = {
+ 'report', 'status', 'start', 'stop', 'push', 'pop', 'line', 'direct',
+ 'start_run', 'stop_run',
+ 'start_page_number', 'stop_page_number',
+ 'report_output_pages', 'report_output_log',
+ 'report_tex_stat', 'report_job_stat',
+ 'show_open', 'show_close', 'show_load',
+ 'dummy',
+}
+
+local method = "nop"
+
+function logs.set_method(newmethod)
+ method = newmethod
+ -- a direct copy might be faster but let's try this for a while
+ setmetatable(logs, { __index = logs[method] })
+end
+
+function logs.get_method()
+ return method
+end
+
+-- installer
+
+local data = { }
+
+function logs.new(category)
+ local logger = data[category]
+ if not logger then
+ logger = function(...)
+ logs.report(category,...)
+ end
+ data[category] = logger
+ end
+ return logger
+end
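-- Editor's note (not part of this patch): a minimal sketch of the installer
-- above; the "fonts" category and the message are made up for illustration.

local report_fonts = logs.new("fonts")

report_fonts("loading %s at %s","somefont.otf","12pt")
-- with the tex method this ends up in the log as something like:
--   fonts           > loading somefont.otf at 12pt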
+
+
+
+-- nop logging (maybe use __call instead)
+
+local noplog = { } logs.nop = noplog setmetatable(logs, { __index = noplog })
+
+for i=1,#functions do
+ noplog[functions[i]] = function() end
+end
+
+-- tex logging
+
+local texlog = { } logs.tex = texlog setmetatable(texlog, { __index = noplog })
+
+function texlog.report(a,b,c,...)
+ if c then
+ write_nl(format("%-16s> %s\n",a,format(b,c,...)))
+ elseif b then
+ write_nl(format("%-16s> %s\n",a,b))
+ else
+ write_nl(format("%-16s>\n",a))
+ end
+end
+
+function texlog.status(a,b,c,...)
+ if c then
+ write_nl(format("%-16s: %s\n",a,format(b,c,...)))
+ elseif b then
+ write_nl(format("%-16s: %s\n",a,b)) -- b can have %'s
+ else
+ write_nl(format("%-16s:>\n",a))
+ end
+end
+
+function texlog.line(fmt,...) -- new
+ if fmt then
+ write_nl(format(fmt,...))
+ else
+ write_nl("")
+ end
+end
+
+local real, user, sub
+
+function texlog.start_page_number()
+ real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+end
+
+local report_pages = logs.new("pages") -- not needed but saves checking when we grep for it
+
+function texlog.stop_page_number()
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
+ io.flush()
+end
+
+texlog.report_job_stat = statistics and statistics.show_job_stat
+
+-- xml logging
+
+local xmllog = { } logs.xml = xmllog setmetatable(xmllog, { __index = noplog })
+
+function xmllog.report(category,fmt,s,...) -- new
+ if s then
+ write_nl(format("<r category='%s'>%s</r>",category,format(fmt,s,...)))
+ elseif fmt then
+ write_nl(format("<r category='%s'>%s</r>",category,fmt))
+ else
+ write_nl(format("<r category='%s'/>",category))
+ end
+end
+
+function xmllog.status(category,fmt,s,...)
+    if s then
+        write_nl(format("<s category='%s'>%s</s>",category,format(fmt,s,...)))
+    elseif fmt then
+        write_nl(format("<s category='%s'>%s</s>",category,fmt))
+    else
+        write_nl(format("<s category='%s'/>",category))
+    end
+end
+
+function xmllog.line(fmt,...) -- new
+ if fmt then
+ write_nl(format("<r>%s</r>",format(fmt,...)))
+ else
+ write_nl("<r/>")
+ end
+end
+
+function xmllog.start() write_nl("<%s>" ) end
+function xmllog.stop () write_nl("</%s>") end
+function xmllog.push () write_nl("<!-- ") end
+function xmllog.pop () write_nl(" -->" ) end
+
+function xmllog.start_run()
+ write_nl("<?xml version='1.0' standalone='yes'?>")
+ write_nl("<job>") -- xmlns='www.pragma-ade.com/luatex/schemas/context-job.rng'
+ write_nl("")
+end
+
+function xmllog.stop_run()
+ write_nl("</job>")
+end
+
+function xmllog.start_page_number()
+ write_nl(format("<p real='%s' page='%s' sub='%s'", texcount.realpageno, texcount.userpageno, texcount.subpageno))
+end
+
+function xmllog.stop_page_number()
+ write("/>")
+ write_nl("")
+end
+
+function xmllog.report_output_pages(p,b)
+ write_nl(format("<v k='pages' v='%s'/>", p))
+ write_nl(format("<v k='bytes' v='%s'/>", b))
+ write_nl("")
+end
+
+function xmllog.report_output_log()
+ -- nothing
+end
+
+function xmllog.report_tex_stat(k,v)
+ write_nl("log","<v k='"..k.."'>"..tostring(v).."</v>")
+end
+
+local nesting = 0
+
+function xmllog.show_open(name)
+ nesting = nesting + 1
+ write_nl(format("<f l='%s' n='%s'>",nesting,name))
+end
+
+function xmllog.show_close(name)
+ write("</f> ")
+ nesting = nesting - 1
+end
+
+function xmllog.show_load(name)
+ write_nl(format("<f l='%s' n='%s'/>",nesting+1,name))
+end
+
+-- initialization
+
+if tex and (tex.jobname or tex.formatname) then
+ -- todo: this can be set in mtxrun ... or maybe we should just forget about this alternative format
+ if (os.getenv("mtx.directives.logmethod") or os.getenv("mtx_directives_logmethod")) == "xml" then
+ logs.set_method('xml')
+ else
+ logs.set_method('tex')
+ end
+else
+ logs.set_method('nop')
+end
+
+-- logging in runners -> these are actually the nop loggers
+
+local name, banner = 'report', 'context'
+
+function noplog.report(category,fmt,...) -- todo: fmt,s
+ if fmt then
+ write_nl(format("%s | %s: %s",name,category,format(fmt,...)))
+ elseif category then
+ write_nl(format("%s | %s",name,category))
+ else
+ write_nl(format("%s |",name))
+ end
+end
+
+noplog.status = noplog.report -- just to be sure, never used
+
+function noplog.simple(fmt,...) -- todo: fmt,s
+ if fmt then
+ write_nl(format("%s | %s",name,format(fmt,...)))
+ else
+ write_nl(format("%s |",name))
+ end
+end
+
+if utils then
+ utils.report = function(...) logs.simple(...) end
+end
+
+function logs.setprogram(newname,newbanner)
+ name, banner = newname, newbanner
+end
+
+function logs.extendbanner(newbanner)
+ banner = banner .. " | ".. newbanner
+end
+
+function logs.reportlines(str) -- todo: <lines></lines>
+ for line in gmatch(str,"(.-)[\n\r]") do
+ logs.report(line)
+ end
+end
+
+function logs.reportline() -- for scripts too
+ logs.report()
+end
+
+function logs.simpleline()
+ logs.report()
+end
+
+function logs.simplelines(str) -- todo: <lines></lines>
+ for line in gmatch(str,"(.-)[\n\r]") do
+ logs.simple(line)
+ end
+end
+
+function logs.reportbanner() -- for scripts too
+ logs.report(banner)
+end
+
+function logs.help(message,option)
+ logs.reportbanner()
+ logs.reportline()
+ logs.reportlines(message)
+ if option ~= "nomoreinfo" then
+ logs.reportline()
+ logs.reportlines(moreinfo)
+ end
+end
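-- Editor's note (not part of this patch): how a runner script would typically
-- use the helpers above; the program name, banner and flags are made up.

logs.setprogram("MTX Example","An Example Runner 0.10")

logs.help([[
--run         process the given file
--list        list known files
]],"nomoreinfo")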
+
+-- logging to a file
+
+
+function logs.system(whereto,process,jobname,category,...)
+    local message = format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%M:%S"),process,jobname,category,format(...))
+    for i=1,10 do
+        local f = io.open(whereto,"a")
+        if f then
+            f:write(message)
+            f:close()
+            break
+        else
+            os.sleep(0.1)
+        end
+    end
+end
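-- Editor's note (not part of this patch): logs.system above appends one status
-- line per call and retries for a short while when the file cannot be opened;
-- the filename and values below are made up.

logs.system("context-run.log","luatex","myjob","timing","%s seconds",12.3)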
+
+-- bonus
+
+function logs.fatal(where,...)
+ logs.report(where,"fatal error: %s, aborting now",format(...))
+ os.exit()
+end
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-pro'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type
+
+-- The protection implemented here is probably not that tight but good enough to catch
+-- problems due to naive usage.
+--
+-- There's a more extensive version (trac-xxx.lua) that supports nesting.
+--
+-- This will change when we have _ENV in lua 5.2+
+
+local trace_namespaces = false trackers.register("system.namespaces", function(v) trace_namespaces = v end)
+
+local report_system = logs.new("system")
+
+namespaces = { }
+
+local registered = { }
+
+local function report_index(k,name)
+ if trace_namespaces then
+ report_system("reference to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
+ else
+ report_system("reference to '%s' in protected namespace '%s'",k,name)
+ end
+end
+
+local function report_newindex(k,name)
+ if trace_namespaces then
+ report_system("assignment to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
+ else
+ report_system("assignment to '%s' in protected namespace '%s'",k,name)
+ end
+end
+
+local function register(name)
+ local data = name == "global" and _G or _G[name]
+ if not data then
+ return -- error
+ end
+ registered[name] = data
+ local m = getmetatable(data)
+ if not m then
+ m = { }
+ setmetatable(data,m)
+ end
+ local index, newindex = { }, { }
+ m.__saved__index = m.__index
+ m.__no__index = function(t,k)
+ if not index[k] then
+ index[k] = true
+ report_index(k,name)
+ end
+ return nil
+ end
+ m.__saved__newindex = m.__newindex
+ m.__no__newindex = function(t,k,v)
+ if not newindex[k] then
+ newindex[k] = true
+ report_newindex(k,name)
+ end
+ rawset(t,k,v)
+ end
+ m.__protection__depth = 0
+end
+
+local function private(name) -- maybe save name
+ local data = registered[name]
+ if not data then
+ data = _G[name]
+ if not data then
+ data = { }
+ _G[name] = data
+ end
+ register(name)
+ end
+ return data
+end
+
+local function protect(name)
+    local data = registered[name]
+    if not data then
+        return
+    end
+    local m = getmetatable(data)
+    local pd = m.__protection__depth
+    if pd > 0 then
+        m.__protection__depth = pd + 1
+    else
+        m.__saved__index, m.__saved__newindex = m.__index, m.__newindex
+        m.__index, m.__newindex = m.__no__index, m.__no__newindex
+        m.__protection__depth = 1
+    end
+end
+
+local function unprotect(name)
+ local data = registered[name]
+ if not data then
+ return
+ end
+ local m = getmetatable(data)
+ local pd = m.__protection__depth
+ if pd > 1 then
+ m.__protection__depth = pd - 1
+ else
+ m.__index, m.__newindex = m.__saved__index, m.__saved__newindex
+ m.__protection__depth = 0
+ end
+end
+
+local function protectall()
+ for name, _ in next, registered do
+ if name ~= "global" then
+ protect(name)
+ end
+ end
+end
+
+local function unprotectall()
+ for name, _ in next, registered do
+ if name ~= "global" then
+ unprotect(name)
+ end
+ end
+end
+
+namespaces.register = register -- register when defined
+namespaces.private = private -- allocate and register if needed
+namespaces.protect = protect
+namespaces.unprotect = unprotect
+namespaces.protectall = protectall
+namespaces.unprotectall = unprotectall
+
+namespaces.private("namespaces") registered = { } register("global") -- unreachable
+
+directives.register("system.protect", function(v)
+ if v then
+ protectall()
+ else
+ unprotectall()
+ end
+end)
+
+directives.register("system.checkglobals", function(v)
+ if v then
+ report_system("enabling global namespace guard")
+ protect("global")
+ else
+ report_system("disabling global namespace guard")
+ unprotect("global")
+ end
+end)
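-- Editor's note (not part of this patch): a sketch of the protection mechanism
-- above; the "mylib" namespace and the field names are made up for illustration.

local mylib = namespaces.private("mylib") -- allocates and registers _G.mylib
namespaces.protect("mylib")
local _ = mylib.nosuchfield  -- reported as a reference to an unknown field
mylib.newfield = 123         -- reported, but still assigned via rawset
namespaces.unprotect("mylib")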
+
+-- dummy section (will go to luat-dum.lua)
+
+
+
+
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['luat-env'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- A former version provided functionality for non embedded core
+-- scripts i.e. runtime library loading. Given the amount of
+-- Lua code we use now, this no longer makes sense. Much of this
+-- evolved before bytecode arrays were available and so a lot of
+-- code has disappeared already.
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
+local unquote, quote = string.unquote, string.quote
+
+-- precautions
+
+os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+
+function os.setlocale()
+ -- no way you can mess with it
+end
+
+-- dirty tricks
+
+if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
+ arg[-1] = arg[0]
+ arg[ 0] = arg[2]
+ for k=3,#arg do
+ arg[k-2] = arg[k]
+ end
+ arg[#arg] = nil -- last
+ arg[#arg] = nil -- pre-last
+end
+
+-- environment
+
+environment = environment or { }
+environment.arguments = { }
+environment.files = { }
+environment.sortedflags = nil
+
+local mt = {
+ __index = function(_,k)
+ if k == "version" then
+ local version = tex.toks and tex.toks.contextversiontoks
+ if version and version ~= "" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k == "jobname" or k == "formatname" then
+ local name = tex and tex[k]
+            if name or name == "" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k == "outputfilename" then
+ local name = environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+
+setmetatable(environment,mt)
+
+function environment.initialize_arguments(arg)
+ local arguments, files = { }, { }
+ environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
+ for index=1,#arg do
+ local argument = arg[index]
+ if index > 0 then
+ local flag, value = match(argument,"^%-+(.-)=(.-)$")
+ if flag then
+ arguments[flag] = unquote(value or "")
+ else
+ flag = match(argument,"^%-+(.+)")
+ if flag then
+ arguments[flag] = true
+ else
+ files[#files+1] = argument
+ end
+ end
+ end
+ end
+ environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
+end
+
+function environment.setargument(name,value)
+ environment.arguments[name] = value
+end
+
+-- todo: defaults, better checks e.g. on type (boolean versus string)
+--
+-- tricky: too many hits when we support partials unless we add
+-- a registration of arguments, so from now on we have 'partial'
+
+function environment.argument(name,partial)
+ local arguments, sortedflags = environment.arguments, environment.sortedflags
+ if arguments[name] then
+ return arguments[name]
+ elseif partial then
+ if not sortedflags then
+ sortedflags = table.sortedkeys(arguments)
+ for k=1,#sortedflags do
+ sortedflags[k] = "^" .. sortedflags[k]
+ end
+ environment.sortedflags = sortedflags
+ end
+ -- example of potential clash: ^mode ^modefile
+ for k=1,#sortedflags do
+ local v = sortedflags[k]
+ if find(name,v) then
+ return arguments[sub(v,2,#v)]
+ end
+ end
+ end
+ return nil
+end
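-- Editor's note (not part of this patch): how the two functions above map a
-- command line onto environment.arguments; the flags and filename are made up.

environment.initialize_arguments { "--make", "--lang=en", "somefile.tex" }

print(environment.argument("make"))          -- true
print(environment.argument("lang"))          -- "en"
print(environment.argument("language",true)) -- also "en": "--lang" is accepted as a partial flag
print(environment.files[1])                  -- "somefile.tex"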
+
+function environment.split_arguments(separator) -- rather special, cut-off before separator
+ local done, before, after = false, { }, { }
+ local original_arguments = environment.original_arguments
+ for k=1,#original_arguments do
+ local v = original_arguments[k]
+ if not done and v == separator then
+ done = true
+ elseif done then
+ after[#after+1] = v
+ else
+ before[#before+1] = v
+ end
+ end
+ return before, after
+end
+
+function environment.reconstruct_commandline(arg,noquote)
+ arg = arg or environment.original_arguments
+ if noquote and #arg == 1 then
+ local a = arg[1]
+ a = resolvers.resolve(a)
+ a = unquote(a)
+ return a
+ elseif #arg > 0 then
+ local result = { }
+ for i=1,#arg do
+ local a = arg[i]
+ a = resolvers.resolve(a)
+ a = unquote(a)
+ a = gsub(a,'"','\\"') -- tricky
+ if find(a," ") then
+ result[#result+1] = quote(a)
+ else
+ result[#result+1] = a
+ end
+ end
+ return table.join(result," ")
+ else
+ return ""
+ end
+end
+
+if arg then
+
+ -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
+ local newarg, instring = { }, false
+
+ for index=1,#arg do
+ local argument = arg[index]
+ if find(argument,"^\"") then
+ newarg[#newarg+1] = gsub(argument,"^\"","")
+ if not find(argument,"\"$") then
+ instring = true
+ end
+ elseif find(argument,"\"$") then
+ newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
+ instring = false
+ elseif instring then
+ newarg[#newarg] = newarg[#newarg] .. " " .. argument
+ else
+ newarg[#newarg+1] = argument
+ end
+ end
+ for i=1,-5,-1 do
+ newarg[i] = arg[i]
+ end
+
+ environment.initialize_arguments(newarg)
+ environment.original_arguments = newarg
+ environment.raw_arguments = arg
+
+ arg = { } -- prevent duplicate handling
+
+end
+
+-- weird place ... depends on a not yet loaded module
+
+function environment.texfile(filename)
+ return resolvers.find_file(filename,'tex')
+end
+
+function environment.luafile(filename)
+ local resolved = resolvers.find_file(filename,'tex') or ""
+ if resolved ~= "" then
+ return resolved
+ end
+ resolved = resolvers.find_file(filename,'texmfscripts') or ""
+ if resolved ~= "" then
+ return resolved
+ end
+ return resolvers.find_file(filename,'luatexlibs') or ""
+end
+
+environment.loadedluacode = loadfile -- can be overloaded
+
+function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
+ filename = file.replacesuffix(filename, "lua")
+ local fullname = environment.luafile(filename)
+ if fullname and fullname ~= "" then
+ local data = environment.loadedluacode(fullname)
+ if trace_locating then
+ report_resolvers("loading file %s%s", fullname, not data and " failed" or "")
+ elseif not silent then
+ texio.write("<",data and "+ " or "- ",fullname,">")
+ end
+ return data
+ else
+ if trace_locating then
+ report_resolvers("unknown file %s", filename)
+ end
+ return nil
+ end
+end
+
+-- the next ones can use the previous ones / combine
+
+function environment.loadluafile(filename, version)
+ local lucname, luaname, chunk
+ local basename = file.removesuffix(filename)
+ if basename == filename then
+ lucname, luaname = basename .. ".luc", basename .. ".lua"
+ else
+ lucname, luaname = nil, basename -- forced suffix
+ end
+ -- when not overloaded by explicit suffix we look for a luc file first
+ local fullname = (lucname and environment.luafile(lucname)) or ""
+ if fullname ~= "" then
+ if trace_locating then
+ report_resolvers("loading %s", fullname)
+ end
+ chunk = loadfile(fullname) -- this way we don't need a file exists check
+ end
+ if chunk then
+ assert(chunk)()
+ if version then
+            -- we check whether the version number of this chunk matches
+ local v = version -- can be nil
+ if modules and modules[filename] then
+ v = modules[filename].version -- new method
+ elseif versions and versions[filename] then
+ v = versions[filename] -- old method
+ end
+ if v == version then
+ return true
+ else
+ if trace_locating then
+ report_resolvers("version mismatch for %s: lua=%s, luc=%s", filename, v, version)
+ end
+ environment.loadluafile(filename)
+ end
+ else
+ return true
+ end
+ end
+ fullname = (luaname and environment.luafile(luaname)) or ""
+ if fullname ~= "" then
+ if trace_locating then
+ report_resolvers("loading %s", fullname)
+ end
+ chunk = loadfile(fullname) -- this way we don't need a file exists check
+ if not chunk then
+ if trace_locating then
+ report_resolvers("unknown file %s", filename)
+ end
+ else
+ assert(chunk)()
+ return true
+ end
+ end
+ return false
+end
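-- Editor's note (not part of this patch): a sketch of the loader preference
-- above; the module name "whatever" and the version number are made up.

-- looks for whatever.luc first and falls back on whatever.lua; when a version
-- is given, the modules['whatever'].version set by the chunk has to match it
if not environment.loadluafile("whatever",1.001) then
    logs.report("system","module 'whatever' is missing or out of date")
end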
+
end -- of closure
@@ -3906,6 +5048,8 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+local report_xml = logs.new("xml")
+
--[[ldx--
<p>The parser used here is inspired by the variant discussed in the lua book, but
handles comment and processing instructions, has a different structure, provides
@@ -3920,7 +5064,6 @@ optimize the code.</p>
xml = xml or { }
---~ local xml = xml
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
@@ -4044,7 +5187,7 @@ local dcache, hcache, acache = { }, { }, { }
local mt = { }
-function initialize_mt(root)
+local function initialize_mt(root)
mt = { __index = root } -- will be redefined later
end
@@ -4148,7 +5291,7 @@ local reported_attribute_errors = { }
local function attribute_value_error(str)
if not reported_attribute_errors[str] then
- logs.report("xml","invalid attribute value: %q",str)
+ report_xml("invalid attribute value: %q",str)
reported_attribute_errors[str] = true
at._error_ = str
end
@@ -4156,7 +5299,7 @@ local function attribute_value_error(str)
end
local function attribute_specification_error(str)
if not reported_attribute_errors[str] then
- logs.report("xml","invalid attribute specification: %q",str)
+ report_xml("invalid attribute specification: %q",str)
reported_attribute_errors[str] = true
at._error_ = str
end
@@ -4219,18 +5362,18 @@ local function handle_hex_entity(str)
h = unify_predefined and predefined_unified[n]
if h then
if trace_entities then
- logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
end
elseif utfize then
h = (n and utfchar(n)) or xml.unknown_hex_entity_format(str) or ""
if not n then
- logs.report("xml","utfize, ignoring hex entity &#x%s;",str)
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
end
else
if trace_entities then
- logs.report("xml","found entity &#x%s;",str)
+ report_xml("found entity &#x%s;",str)
end
h = "&#x" .. str .. ";"
end
@@ -4246,18 +5389,18 @@ local function handle_dec_entity(str)
d = unify_predefined and predefined_unified[n]
if d then
if trace_entities then
- logs.report("xml","utfize, converting dec entity &#%s; into %s",str,d)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
elseif utfize then
d = (n and utfchar(n)) or xml.unknown_dec_entity_format(str) or ""
if not n then
- logs.report("xml","utfize, ignoring dec entity &#%s;",str)
+ report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,h)
end
else
if trace_entities then
- logs.report("xml","found entity &#%s;",str)
+ report_xml("found entity &#%s;",str)
end
d = "&#" .. str .. ";"
end
@@ -4282,7 +5425,7 @@ local function handle_any_entity(str)
end
if a then
if trace_entities then
- logs.report("xml","resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
a = lpegmatch(parsedentity,a) or a
else
@@ -4291,11 +5434,11 @@ local function handle_any_entity(str)
end
if a then
if trace_entities then
- logs.report("xml","resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
end
else
if trace_entities then
- logs.report("xml","keeping entity &%s;",str)
+ report_xml("keeping entity &%s;",str)
end
if str == "" then
a = "&error;"
@@ -4307,7 +5450,7 @@ local function handle_any_entity(str)
acache[str] = a
elseif trace_entities then
if not acache[str] then
- logs.report("xml","converting entity &%s; into %s",str,a)
+ report_xml("converting entity &%s; into %s",str,a)
acache[str] = a
end
end
@@ -4316,7 +5459,7 @@ local function handle_any_entity(str)
local a = acache[str]
if not a then
if trace_entities then
- logs.report("xml","found entity &%s;",str)
+ report_xml("found entity &%s;",str)
end
a = resolve_predefined and predefined_simplified[str]
if a then
@@ -4335,7 +5478,7 @@ local function handle_any_entity(str)
end
local function handle_end_entity(chr)
- logs.report("xml","error in entity, %q found instead of ';'",chr)
+ report_xml("error in entity, %q found instead of ';'",chr)
end
local space = S(' \r\n\t')
@@ -4470,7 +5613,7 @@ local function xmlconvert(data, settings)
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, result, entities = { }, { }, { }, { }, nil, nil, settings.entities or { }
+ stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -4498,6 +5641,7 @@ local function xmlconvert(data, settings)
else
errorstr = "invalid xml file - no text at all"
end
+ local result
if errorstr and errorstr ~= "" then
result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
setmetatable(stack, mt)
@@ -4678,7 +5822,7 @@ local function verbose_element(e,handlers)
ats[#ats+1] = format('%s=%q',k,v)
end
end
- if ern and trace_remap and ern ~= ens then
+ if ern and trace_entities and ern ~= ens then
ens = ern
end
if ens ~= "" then
@@ -4809,7 +5953,7 @@ local function newhandlers(settings)
if settings then
for k,v in next, settings do
if type(v) == "table" then
- tk = t[k] if not tk then tk = { } t[k] = tk end
+ local tk = t[k] if not tk then tk = { } t[k] = tk end
for kk,vv in next, v do
tk[kk] = vv
end
@@ -4920,7 +6064,7 @@ local function xmltext(root) -- inline
return (root and xmltostring(root)) or ""
end
-function initialize_mt(root)
+initialize_mt = function(root) -- redefinition
mt = { __tostring = xmltext, __index = root }
end
@@ -4955,7 +6099,6 @@ xml.string = xmlstring
<p>A few helpers:</p>
--ldx]]--
---~ xmlsetproperty(root,"settings",settings)
function xml.settings(e)
while e do
@@ -5117,6 +6260,8 @@ local trace_lpath = false if trackers then trackers.register("xml.path",
local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end
+local report_lpath = logs.new("lpath")
+
--[[ldx--
<p>We've now arrived at an interesting part: accessing the tree using a subset
of <l n='xpath'/> and since we're not compatible we call it <l n='lpath'/>. We
@@ -5143,7 +6288,7 @@ local function fallback (t, name)
if fn then
t[name] = fn
else
- logs.report("xml","unknown sub finalizer '%s'",tostring(name))
+ report_lpath("unknown sub finalizer '%s'",tostring(name))
fn = function() end
end
return fn
@@ -5204,11 +6349,6 @@ apply_axis['root'] = function(list)
end
apply_axis['self'] = function(list)
---~ local collected = { }
---~ for l=1,#list do
---~ collected[#collected+1] = list[l]
---~ end
---~ return collected
return list
end
@@ -5335,38 +6475,10 @@ apply_axis['namespace'] = function(list)
end
apply_axis['following'] = function(list) -- incomplete
---~ local collected = { }
---~ for l=1,#list do
---~ local ll = list[l]
---~ local p = ll.__p__
---~ local d = p.dt
---~ for i=ll.ni+1,#d do
---~ local di = d[i]
---~ if type(di) == "table" then
---~ collected[#collected+1] = di
---~ break
---~ end
---~ end
---~ end
---~ return collected
return { }
end
apply_axis['preceding'] = function(list) -- incomplete
---~ local collected = { }
---~ for l=1,#list do
---~ local ll = list[l]
---~ local p = ll.__p__
---~ local d = p.dt
---~ for i=ll.ni-1,1,-1 do
---~ local di = d[i]
---~ if type(di) == "table" then
---~ collected[#collected+1] = di
---~ break
---~ end
---~ end
---~ end
---~ return collected
return { }
end
@@ -5629,14 +6741,12 @@ local converter = Cs (
)
cleaner = Cs ( (
---~ lp_fastpos +
lp_reserved +
lp_number +
lp_string +
1 )^1 )
---~ expr
local template_e = [[
local expr = xml.expressions
@@ -5687,13 +6797,13 @@ local skip = { }
local function errorrunner_e(str,cnv)
if not skip[str] then
- logs.report("lpath","error in expression: %s => %s",str,cnv)
+ report_lpath("error in expression: %s => %s",str,cnv)
skip[str] = cnv or str
end
return false
end
local function errorrunner_f(str,arg)
- logs.report("lpath","error in finalizer: %s(%s)",str,arg or "")
+ report_lpath("error in finalizer: %s(%s)",str,arg or "")
return false
end
@@ -5860,7 +6970,7 @@ local function lshow(parsed)
end
local s = table.serialize_functions -- ugly
table.serialize_functions = false -- ugly
- logs.report("lpath","%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false))
+ report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false))
table.serialize_functions = s -- ugly
end
@@ -5890,7 +7000,7 @@ parse_pattern = function (pattern) -- the gain of caching is rather minimal
local np = #parsed
if np == 0 then
parsed = { pattern = pattern, register_self, state = "parsing error" }
- logs.report("lpath","parsing error in '%s'",pattern)
+ report_lpath("parsing error in '%s'",pattern)
lshow(parsed)
else
-- we could have done this with a more complex parser but this
@@ -5994,32 +7104,32 @@ local function traced_apply(list,parsed,nofparsed,order)
if trace_lparse then
lshow(parsed)
end
- logs.report("lpath", "collecting : %s",parsed.pattern)
- logs.report("lpath", " root tags : %s",tagstostring(list))
- logs.report("lpath", " order : %s",order or "unset")
+ report_lpath("collecting : %s",parsed.pattern)
+ report_lpath(" root tags : %s",tagstostring(list))
+ report_lpath(" order : %s",order or "unset")
local collected = list
for i=1,nofparsed do
local pi = parsed[i]
local kind = pi.kind
if kind == "axis" then
collected = apply_axis[pi.axis](collected)
- logs.report("lpath", "% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
+ report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
elseif kind == "nodes" then
collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- logs.report("lpath", "% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
+ report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
- logs.report("lpath", "% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
+ report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
elseif kind == "finalizer" then
collected = pi.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
return collected
end
if not collected or #collected == 0 then
local pn = i < nofparsed and parsed[nofparsed]
if pn and pn.kind == "finalizer" then
collected = pn.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
return collected
end
return nil
@@ -6132,7 +7242,7 @@ expressions.boolean = toboolean
-- user interface
local function traverse(root,pattern,handle)
- logs.report("xml","use 'xml.selection' instead for '%s'",pattern)
+ report_lpath("use 'xml.selection' instead for '%s'",pattern)
local collected = parse_apply({ root },pattern)
if collected then
for c=1,#collected do
@@ -6180,7 +7290,7 @@ local function dofunction(collected,fnc)
f(collected[c])
end
else
- logs.report("xml","unknown function '%s'",fnc)
+ report_lpath("unknown function '%s'",fnc)
end
end
end
@@ -6372,7 +7482,6 @@ local function xmlgsub(t,old,new) -- will be replaced
end
end
---~ xml.gsub = xmlgsub
function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual
if d and k then
@@ -6384,12 +7493,7 @@ function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual
end
end
---~ xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
---~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end
---~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end
---~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end
---~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>"
local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs
@@ -6455,6 +7559,8 @@ if not modules then modules = { } end modules ['lxml-aux'] = {
local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
+local report_xml = logs.new("xml")
+
local xmlparseapply, xmlconvert, xmlcopy, xmlname = xml.parse_apply, xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
@@ -6463,7 +7569,7 @@ local insert, remove = table.insert, table.remove
local gmatch, gsub = string.gmatch, string.gsub
local function report(what,pattern,c,e)
- logs.report("xml","%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
+ report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
local function withelements(e,handle,depth)
@@ -6616,12 +7722,7 @@ local function xmltoelement(whatever,root)
return whatever -- string
end
if element then
- --~ if element.ri then
- --~ element = element.dt[element.ri].dt
- --~ else
- --~ element = element.dt
- --~ end
- end
+ end
return element
end
@@ -6760,9 +7861,6 @@ local function include(xmldata,pattern,attribute,recursive,loaddata)
-- for the moment hard coded
epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
else
---~ local settings = xmldata.settings
---~ settings.parent_root = xmldata -- to be tested
---~ local xi = xmlconvert(data,settings)
local xi = xmlinheritedconvert(data,xmldata)
if not xi then
epdt[ek.ni] = "" -- xml.empty(d,k)
@@ -6779,28 +7877,7 @@ end
xml.include = include
---~ local function manipulate(xmldata,pattern,manipulator) -- untested and might go away
---~ local collected = xmlparseapply({ xmldata },pattern)
---~ if collected then
---~ local xmltostring = xml.tostring
---~ for c=1,#collected do
---~ local e = collected[c]
---~ local data = manipulator(xmltostring(e))
---~ if data == "" then
---~ epdt[e.ni] = ""
---~ else
---~ local xi = xmlinheritedconvert(data,xmldata)
---~ if not xi then
---~ epdt[e.ni] = ""
---~ else
---~ epdt[e.ni] = xml.body(xi) -- xml.assign(d,k,xi)
---~ end
---~ end
---~ end
---~ end
---~ end
-
---~ xml.manipulate = manipulate
+
function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and trailing space !
local collected = xmlparseapply({ root },pattern)
@@ -6826,8 +7903,7 @@ function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and
end
end
else
- --~ str.ni = i
- t[#t+1] = str
+ t[#t+1] = str
end
end
e.dt = t
@@ -7285,825 +8361,1137 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-env'] = {
+if not modules then modules = { } end modules ['data-ini'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
--- A former version provided functionality for non embeded core
--- scripts i.e. runtime library loading. Given the amount of
--- Lua code we use now, this no longer makes sense. Much of this
--- evolved before bytecode arrays were available and so a lot of
--- code has disappeared already.
+local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local concat = table.concat
+local next, type = next, type
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
-local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquote, quote = string.unquote, string.quote
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
--- precautions
+local report_resolvers = logs.new("resolvers")
-os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+local ostype, osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv
-function os.setlocale()
- -- no way you can mess with it
-end
-
--- dirty tricks
-
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
-end
+-- The code here used to be part of data-res but for convenience
+-- we now split it over multiple files. As this file is now the
+-- starting point we introduce resolvers here.
-if profiler and os.env["MTX_PROFILE_RUN"] == "YES" then
- profiler.start("luatex-profile.log")
-end
+resolvers = resolvers or { }
--- environment
+-- We don't want the kpse library to kick in. Also, we want to be able to
+-- execute programs. Control over execution is implemented later.
-environment = environment or { }
-environment.arguments = { }
-environment.files = { }
-environment.sortedflags = nil
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
-if not environment.jobname or environment.jobname == "" then if tex then environment.jobname = tex.jobname end end
-if not environment.version or environment.version == "" then environment.version = "unknown" end
-if not environment.jobname then environment.jobname = "unknown" end
+kpse = { original = kpse }
-function environment.initialize_arguments(arg)
- local arguments, files = { }, { }
- environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
- for index=1,#arg do
- local argument = arg[index]
- if index > 0 then
- local flag, value = match(argument,"^%-+(.-)=(.-)$")
- if flag then
- arguments[flag] = unquote(value or "")
- else
- flag = match(argument,"^%-+(.+)")
- if flag then
- arguments[flag] = true
- else
- files[#files+1] = argument
- end
+setmetatable(kpse, {
+ __index = function(kp,name)
+ local r = resolvers[name]
+ if not r then
+ r = function (...)
+ report_resolvers("not supported: %s(%s)",name,concat(...))
end
+ rawset(kp,name,r)
end
+ return r
end
- environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
+} )
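-- Editor's note (not part of this patch): with the metatable above, kpse calls
-- are served by the resolvers namespace once the resolver code further down has
-- been loaded, and anything unknown becomes a reported no-op; both calls below
-- are made up for illustration.

kpse.find_file("context.mkiv")  -- forwarded to resolvers.find_file
kpse.whatever_else("some.file") -- no such resolver, so it only reports "not supported"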
+
+-- First we check a couple of environment variables. Some might be
+-- set already but we need them later on. We start with the system
+-- font path.
+
+do
+
+ local osfontdir = osgetenv("OSFONTDIR")
+
+ if osfontdir and osfontdir ~= "" then
+ -- ok
+ elseif osname == "windows" then
+ ossetenv("OSFONTDIR","c:/windows/fonts//")
+ elseif osname == "macosx" then
+ ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
+ end
+
end
-function environment.setargument(name,value)
- environment.arguments[name] = value
+-- Next comes the user's home path. We need this as later on we have
+-- to replace ~ with its value.
+
+do
+
+ local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or '~'
+
+ homedir = file.collapse_path(homedir)
+
+ ossetenv("HOME", homedir) -- can be used in unix cnf files
+ ossetenv("USERPROFILE",homedir) -- can be used in windows cnf files
+
+ environment.homedir = homedir
+
end
--- todo: defaults, better checks e.g on type (boolean versus string)
---
--- tricky: too many hits when we support partials unless we add
--- a registration of arguments so from now on we have 'partial'
+-- The following code sets the name of our own binary and its
+-- path. This is fallback code as we have os.selfdir now.
-function environment.argument(name,partial)
- local arguments, sortedflags = environment.arguments, environment.sortedflags
- if arguments[name] then
- return arguments[name]
- elseif partial then
- if not sortedflags then
- sortedflags = table.sortedkeys(arguments)
- for k=1,#sortedflags do
- sortedflags[k] = "^" .. sortedflags[k]
- end
- environment.sortedflags = sortedflags
+do
+
+ local args = environment.original_arguments or arg -- this needs a cleanup
+
+ local ownbin = environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
+ local ownpath = environment.ownpath or os.selfdir
+
+ ownbin = file.collapse_path(ownbin)
+ ownpath = file.collapse_path(ownpath)
+
+ if not ownpath or ownpath == "" or ownpath == "unset" then
+ ownpath = args[-1] or arg[-1]
+ ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
+ if not ownpath or ownpath == "" then
+ ownpath = args[-0] or arg[-0]
+ ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
end
- -- example of potential clash: ^mode ^modefile
- for k=1,#sortedflags do
- local v = sortedflags[k]
- if find(name,v) then
- return arguments[sub(v,2,#v)]
+ local binary = ownbin
+ if not ownpath or ownpath == "" then
+ ownpath = ownpath and filedirname(binary)
+ end
+ if not ownpath or ownpath == "" then
+ if os.binsuffix ~= "" then
+ binary = file.replacesuffix(binary,os.binsuffix)
+ end
+ local path = osgetenv("PATH")
+ if path then
+ for p in gmatch(path,"[^"..io.pathseparator.."]+") do
+ local b = filejoin(p,binary)
+ if lfs.isfile(b) then
+ -- we assume that after changing to the path the currentdir function
+ -- resolves to the real location and use this side effect here; this
+ -- trick is needed because on the mac installations use symlinks in the
+ -- path instead of real locations
+ local olddir = lfs.currentdir()
+ if lfs.chdir(p) then
+ local pp = lfs.currentdir()
+ if trace_locating and p ~= pp then
+ report_resolvers("following symlink '%s' to '%s'",p,pp)
+ end
+ ownpath = pp
+ lfs.chdir(olddir)
+ else
+ if trace_locating then
+ report_resolvers("unable to check path '%s'",p)
+ end
+ ownpath = p
+ end
+ break
+ end
+ end
end
end
+ if not ownpath or ownpath == "" then
+ ownpath = "."
+ report_resolvers("forcing fallback ownpath .")
+ elseif trace_locating then
+ report_resolvers("using ownpath '%s'",ownpath)
+ end
end
- return nil
+
+ environment.ownbin = ownbin
+ environment.ownpath = ownpath
+
end
-environment.argument("x",true)
+resolvers.ownpath = environment.ownpath
-function environment.split_arguments(separator) -- rather special, cut-off before separator
- local done, before, after = false, { }, { }
- local original_arguments = environment.original_arguments
- for k=1,#original_arguments do
- local v = original_arguments[k]
- if not done and v == separator then
- done = true
- elseif done then
- after[#after+1] = v
- else
- before[#before+1] = v
- end
- end
- return before, after
+function resolvers.getownpath()
+ return environment.ownpath
end
-function environment.reconstruct_commandline(arg,noquote)
- arg = arg or environment.original_arguments
- if noquote and #arg == 1 then
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquote(a)
- return a
- elseif #arg > 0 then
- local result = { }
- for i=1,#arg do
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquote(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quote(a)
- else
- result[#result+1] = a
- end
- end
- return table.join(result," ")
+-- The self variables permit us to use only a few (or even no)
+-- environment variables.
+
+do
+
+ local ownpath = environment.ownpath or dir.current()
+
+ if ownpath then
+ ossetenv('SELFAUTOLOC', file.collapse_path(ownpath))
+ ossetenv('SELFAUTODIR', file.collapse_path(ownpath .. "/.."))
+ ossetenv('SELFAUTOPARENT', file.collapse_path(ownpath .. "/../.."))
else
- return ""
+ report_resolvers("error: unable to locate ownpath")
+ os.exit()
end
+
end
-if arg then
+-- The running os:
- -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
- local newarg, instring = { }, false
+-- todo: check if context sits here; os.platform is more trustworthy
+-- than the bin check as mtx-update runs from another path
- for index=1,#arg do
- local argument = arg[index]
- if find(argument,"^\"") then
- newarg[#newarg+1] = gsub(argument,"^\"","")
- if not find(argument,"\"$") then
- instring = true
- end
- elseif find(argument,"\"$") then
- newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
- instring = false
- elseif instring then
- newarg[#newarg] = newarg[#newarg] .. " " .. argument
- else
- newarg[#newarg+1] = argument
- end
- end
- for i=1,-5,-1 do
- newarg[i] = arg[i]
- end
+local texos = environment.texos or osgetenv("TEXOS")
+local texmfos = environment.texmfos or osgetenv('SELFAUTODIR')
- environment.initialize_arguments(newarg)
- environment.original_arguments = newarg
- environment.raw_arguments = arg
+if not texos or texos == "" then
+ texos = file.basename(texmfos)
+end
- arg = { } -- prevent duplicate handling
+ossetenv('TEXMFOS', texmfos) -- full bin path
+ossetenv('TEXOS', texos) -- partial bin parent
+ossetenv('SELFAUTOSYSTEM',os.platform) -- bonus
-end
+environment.texos = texos
+environment.texmfos = texmfos
--- weird place ... depends on a not yet loaded module
+-- The current root:
-function environment.texfile(filename)
- return resolvers.find_file(filename,'tex')
-end
+local texroot = environment.texroot or osgetenv("TEXROOT")
-function environment.luafile(filename)
- local resolved = resolvers.find_file(filename,'tex') or ""
- if resolved ~= "" then
- return resolved
- end
- resolved = resolvers.find_file(filename,'texmfscripts') or ""
- if resolved ~= "" then
- return resolved
- end
- return resolvers.find_file(filename,'luatexlibs') or ""
+if not texroot or texroot == "" then
+ texroot = osgetenv('SELFAUTOPARENT')
+ ossetenv('TEXROOT',texroot)
end
-environment.loadedluacode = loadfile -- can be overloaded
+environment.texroot = file.collapse_path(texroot)
---~ function environment.loadedluacode(name)
---~ if os.spawn("texluac -s -o texluac.luc " .. name) == 0 then
---~ local chunk = loadstring(io.loaddata("texluac.luc"))
---~ os.remove("texluac.luc")
---~ return chunk
---~ else
---~ environment.loadedluacode = loadfile -- can be overloaded
---~ return loadfile(name)
---~ end
---~ end
-
-function environment.luafilechunk(filename) -- used for loading lua bytecode in the format
- filename = file.replacesuffix(filename, "lua")
- local fullname = environment.luafile(filename)
- if fullname and fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading file %s", fullname)
- end
- return environment.loadedluacode(fullname)
- else
- if trace_locating then
- logs.report("fileio","unknown file %s", filename)
- end
- return nil
+-- Tracing. Todo ...
+
+function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
+ if n then
+ trackers.disable("resolvers.*")
+ trackers.enable("resolvers."..n)
end
end
--- the next ones can use the previous ones / combine
+resolvers.settrace(osgetenv("MTX_INPUT_TRACE"))
-function environment.loadluafile(filename, version)
- local lucname, luaname, chunk
- local basename = file.removesuffix(filename)
- if basename == filename then
- lucname, luaname = basename .. ".luc", basename .. ".lua"
- else
- lucname, luaname = nil, basename -- forced suffix
- end
- -- when not overloaded by explicit suffix we look for a luc file first
- local fullname = (lucname and environment.luafile(lucname)) or ""
- if fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- end
- if chunk then
- assert(chunk)()
- if version then
- -- we check of the version number of this chunk matches
- local v = version -- can be nil
- if modules and modules[filename] then
- v = modules[filename].version -- new method
- elseif versions and versions[filename] then
- v = versions[filename] -- old method
- end
- if v == version then
- return true
- else
- if trace_locating then
- logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version)
- end
- environment.loadluafile(filename)
- end
- else
- return true
- end
- end
- fullname = (luaname and environment.luafile(luaname)) or ""
- if fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- if not chunk then
- if trace_locating then
- logs.report("fileio","unknown file %s", filename)
- end
- else
- assert(chunk)()
- return true
- end
- end
- return false
-end
+-- todo:
+
+-- if profiler and osgetenv("MTX_PROFILE_RUN") == "YES" then
+-- profiler.start("luatex-profile.log")
+-- end
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-inf'] = {
+if not modules then modules = { } end modules ['data-exp'] = {
version = 1.001,
- comment = "companion to trac-inf.mkiv",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
-local format = string.format
+local format, gsub, find, gmatch, lower = string.format, string.gsub, string.find, string.gmatch, string.lower
+local concat, sort = table.concat, table.sort
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local lpegCt, lpegCs, lpegP, lpegC, lpegS = lpeg.Ct, lpeg.Cs, lpeg.P, lpeg.C, lpeg.S
+local type, next = type, next
-local statusinfo, n, registered = { }, 0, { }
+local ostype = os.type
+local collapse_path = file.collapse_path
-statistics = statistics or { }
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
-statistics.enable = true
-statistics.threshold = 0.05
+local report_resolvers = logs.new("resolvers")
--- timing functions
+-- As this bit of code is somewhat special it gets its own module. After
+-- all, when working on the main resolver code, I don't want to scroll
+-- past this every time.
-local clock = os.gettimeofday or os.clock
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
+-- this one is better and faster, but it took me a while to realize
+-- that this kind of replacement is cleaner than messy parsing and
+-- fuzzy concatenating. We can probably gain a bit by selectively
+-- applying lpeg, but experiments with lpeg parsing this proved not to
+-- work that well; the parsing is ok, but dealing with the resulting
+-- table is a pain because we need to work inside-out recursively.
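-- Editor's note (not part of this patch): a worked example of the expansion
-- implemented below, using one of the patterns listed above; braces are folded
-- away step by step until only a flat comma separated list remains.

local expanded = resolvers.expanded_path_from_list { "a{b,c}{d,e}f" }
-- expanded is { "abdf", "abef", "acdf", "acef" }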
-local notimer
+local dummy_path_expr = "^!*unset/*$"
-function statistics.hastimer(instance)
- return instance and instance.starttime
+local function do_first(a,b)
+ local t = { }
+ for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
+ return "{" .. concat(t,",") .. "}"
end
-function statistics.resettiming(instance)
- if not instance then
- notimer = { timing = 0, loadtime = 0 }
- else
- instance.timing, instance.loadtime = 0, 0
+local function do_second(a,b)
+ local t = { }
+ for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
+ return "{" .. concat(t,",") .. "}"
+end
+
+local function do_both(a,b)
+ local t = { }
+ for sa in gmatch(a,"[^,]+") do
+ for sb in gmatch(b,"[^,]+") do
+ t[#t+1] = sa .. sb
+ end
end
+ return "{" .. concat(t,",") .. "}"
+end
+
+local function do_three(a,b,c)
+ return a .. b .. c
end
-function statistics.starttiming(instance)
- if not instance then
- notimer = { }
- instance = notimer
+local stripper_1 = lpeg.stripper("{}@")
+
+local replacer_1 = lpeg.replacer {
+ { ",}", ",@}" },
+ { "{,", "{@," },
+}
+
+local function splitpathexpr(str, newlist, validate)
+ -- no need for further optimization as it is only called a
+ -- few times; we can use lpeg for the sub
+ if trace_expansions then
+ report_resolvers("expanding variable '%s'",str)
end
- local it = instance.timing
- if not it then
- it = 0
+ local t, ok, done = newlist or { }, false, false
+ str = lpegmatch(replacer_1,str)
+ while true do
+ done = false
+ while true do
+ str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
+ if ok > 0 then done = true else break end
+ end
+ while true do
+ str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
+ if ok > 0 then done = true else break end
+ end
+ while true do
+ str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
+ if ok > 0 then done = true else break end
+ end
+ str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
+ if ok > 0 then done = true end
+ if not done then break end
end
- if it == 0 then
- instance.starttime = clock()
- if not instance.loadtime then
- instance.loadtime = 0
+ str = lpegmatch(stripper_1,str)
+ if validate then
+ for s in gmatch(str,"[^,]+") do
+ s = validate(s)
+ if s then t[#t+1] = s end
end
else
---~ logs.report("system","nested timing (%s)",tostring(instance))
- end
- instance.timing = it + 1
-end
-
-function statistics.stoptiming(instance, report)
- if not instance then
- instance = notimer
+ for s in gmatch(str,"[^,]+") do
+ t[#t+1] = s
+ end
end
- if instance then
- local it = instance.timing
- if it > 1 then
- instance.timing = it - 1
- else
- local starttime = instance.starttime
- if starttime then
- local stoptime = clock()
- local loadtime = stoptime - starttime
- instance.stoptime = stoptime
- instance.loadtime = instance.loadtime + loadtime
- if report then
- statistics.report("load time %0.3f",loadtime)
- end
- instance.timing = 0
- return loadtime
- end
+ if trace_expansions then
+ for k=1,#t do
+ report_resolvers("% 4i: %s",k,t[k])
end
end
- return 0
+ return t
end
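-- A small usage sketch (it assumes it runs inside this module, since
-- splitpathexpr is a local helper): braces are expanded into a flat,
-- comma separated list; without a validate function the entries are
-- kept as they are.

local list = splitpathexpr("a,b/{p,q},c")
-- list is now { "a", "b/p", "b/q", "c" }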
-function statistics.elapsedtime(instance)
- if not instance then
- instance = notimer
+local function validate(s)
+ local isrecursive = find(s,"//$")
+ s = collapse_path(s)
+ if isrecursive then
+ s = s .. "//"
end
- return format("%0.3f",(instance and instance.loadtime) or 0)
+ return s ~= "" and not find(s,dummy_path_expr) and s
end
-function statistics.elapsedindeed(instance)
- if not instance then
- instance = notimer
+resolvers.validated_path = validate -- keeps the trailing //
+
+function resolvers.expanded_path_from_list(pathlist) -- maybe not a list, just a path
+ -- a previous version fed back into pathlist
+ local newlist, ok = { }, false
+ for k=1,#pathlist do
+ if find(pathlist[k],"[{}]") then
+ ok = true
+ break
+ end
end
- local t = (instance and instance.loadtime) or 0
- return t > statistics.threshold
+ if ok then
+ for k=1,#pathlist do
+ splitpathexpr(pathlist[k],newlist,validate)
+ end
+ else
+ for k=1,#pathlist do
+ for p in gmatch(pathlist[k],"([^,]+)") do
+ p = validate(p)
+ if p ~= "" then newlist[#newlist+1] = p end
+ end
+ end
+ end
+ return newlist
end
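-- A usage sketch of the exported variant: entries matching the unset
-- pattern are dropped by validate and a trailing // (recursive search)
-- is preserved; expansion of the variables themselves happens elsewhere.

local paths = resolvers.expanded_path_from_list { "{$TEXMFLOCAL,$TEXMFMAIN}/web2c" }
-- paths is now { "$TEXMFLOCAL/web2c", "$TEXMFMAIN/web2c" }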
-function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds
- if statistics.elapsedindeed(instance) then
- return format("%s seconds %s", statistics.elapsedtime(instance),rest or "")
- end
+-- We also put some cleanup code here.
+
+local cleanup -- used recursively
+
+cleanup = lpeg.replacer {
+ { "!", "" },
+ { "\\", "/" },
+ { "~" , function() return lpegmatch(cleanup,environment.homedir) end },
+}
+
+function resolvers.clean_path(str)
+ return str and lpegmatch(cleanup,str)
end
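-- A usage sketch: clean_path drops the !! markers used in kpse style
-- paths, turns backslashes into forward slashes and replaces ~ by the
-- (cleaned) home directory taken from environment.homedir.

local clean = resolvers.clean_path([[!!C:\texmf-local\fonts]])
-- clean is now "C:/texmf-local/fonts"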
--- general function
+-- This one strips quotes and funny tokens.
-function statistics.register(tag,fnc)
- if statistics.enable and type(fnc) == "function" then
- local rt = registered[tag] or (#statusinfo + 1)
- statusinfo[rt] = { tag, fnc }
- registered[tag] = rt
- if #tag > n then n = #tag end
- end
+
+local expandhome = lpegP("~") / "$HOME" -- environment.homedir
+
+local dodouble = lpegP('"')/"" * (expandhome + (1 - lpegP('"')))^0 * lpegP('"')/""
+local dosingle = lpegP("'")/"" * (expandhome + (1 - lpegP("'")))^0 * lpegP("'")/""
+local dostring = (expandhome + 1 )^0
+
+local stripper = lpegCs(
+ lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
+)
+
+function resolvers.checked_variable(str) -- assumes str is a string
+ return lpegmatch(stripper,str) or str
end
-function statistics.show(reporter)
- if statistics.enable then
- if not reporter then reporter = function(tag,data,n) texio.write_nl(tag .. " " .. data) end end
- -- this code will move
- local register = statistics.register
- register("luatex banner", function()
- return string.lower(status.banner)
- end)
- register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
- end)
- register("callbacks", function()
- local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
- return format("direct: %s, indirect: %s, total: %s", total-indirect, indirect, total)
- end)
- register("current memory usage", statistics.memused)
- register("runtime",statistics.runtime)
--- --
- for i=1,#statusinfo do
- local s = statusinfo[i]
- local r = s[2]()
- if r then
- reporter(s[1],r,n)
+-- The path splitter:
+
+-- A config (optionally) has the paths split in tables. Internally
+-- we join them and split them after the expansion has taken place. This
+-- is more convenient.
+
+
+local cache = { }
+
+local splitter = lpegCt(lpeg.splitat(lpegS(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+
+local function split_configuration_path(str) -- beware, this can be either a path or a { specification }
+ if str then
+ local found = cache[str]
+ if not found then
+ if str == "" then
+ found = { }
+ else
+ str = gsub(str,"\\","/")
+ local split = lpegmatch(splitter,str)
+ found = { }
+ for i=1,#split do
+ local s = split[i]
+ if not find(s,"^{*unset}*") then
+ found[#found+1] = s
+ end
+ end
+ if trace_expansions then
+ report_resolvers("splitting path specification '%s'",str)
+ for k=1,#found do
+ report_resolvers("% 4i: %s",k,found[k])
+ end
+ end
+ cache[str] = found
end
end
- texio.write_nl("") -- final newline
- statistics.enable = false
+ return found
end
end
-function statistics.show_job_stat(tag,data,n)
- texio.write_nl(format("%-15s: %s - %s","mkiv lua stats",tag:rpadd(n," "),data))
-end
-
-function statistics.memused() -- no math.round yet -)
- local round = math.round or math.floor
- return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
-end
+resolvers.split_configuration_path = split_configuration_path
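-- A usage sketch (on a unix-like system, where both : and ; act as
-- separators; on windows only ; does, so drive letters survive):

local parts = resolvers.split_configuration_path(".;/usr/share/texmf;/home/user/texmf")
-- parts is now { ".", "/usr/share/texmf", "/home/user/texmf" }; the
-- result is cached, so splitting the same string again is cheap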
-if statistics.runtime then
- -- already loaded and set
-elseif luatex and luatex.starttime then
- statistics.starttime = luatex.starttime
- statistics.loadtime = 0
- statistics.timing = 0
-else
- statistics.starttiming(statistics)
+function resolvers.split_path(str)
+ if type(str) == 'table' then
+ return str
+ else
+ return split_configuration_path(str)
+ end
end
-function statistics.runtime()
- statistics.stoptiming(statistics)
- return statistics.formatruntime(statistics.elapsedtime(statistics))
+function resolvers.join_path(str)
+ if type(str) == 'table' then
+ return file.join_path(str)
+ else
+ return str
+ end
end
-function statistics.formatruntime(runtime)
- return format("%s seconds", statistics.elapsedtime(statistics))
-end
+-- The next function scans directories and returns a hash where the
+-- entries are either strings or tables.
-function statistics.timed(action,report)
- local timer = { }
- report = report or logs.simple
- statistics.starttiming(timer)
- action()
- statistics.stoptiming(timer)
- report("total runtime: %s",statistics.elapsedtime(timer))
-end
+-- starting with . or .. etc or funny char
--- where, not really the best spot for this:
-commands = commands or { }
-local timer
-function commands.resettimer()
- statistics.resettiming(timer)
- statistics.starttiming(timer)
-end
+local weird = lpegP(".")^1 + lpeg.anywhere(lpegS("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-function commands.elapsedtime()
- statistics.stoptiming(timer)
- tex.sprint(statistics.elapsedtime(timer))
+function resolvers.scan_files(specification)
+ if trace_locating then
+ report_resolvers("scanning path '%s'",specification)
+ end
+ local attributes, directory = lfs.attributes, lfs.dir
+ local files = { __path__ = specification }
+ local n, m, r = 0, 0, 0
+ local function scan(spec,path)
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if type(f) == 'string' then
+ files[name] = { f, path }
+ else
+ f[#f+1] = path
+ end
+ else -- probably unique anyway
+ files[name] = path
+ local lower = lower(name)
+ if name ~= lower then
+ files["remap:"..lower] = name
+ r = r + 1
+ end
+ end
+ elseif mode == 'directory' then
+ m = m + 1
+ if path ~= "" then
+ dirs[#dirs+1] = path..'/'..name
+ else
+ dirs[#dirs+1] = name
+ end
+ end
+ end
+ end
+ if #dirs > 0 then
+ sort(dirs)
+ for i=1,#dirs do
+ scan(spec,dirs[i])
+ end
+ end
+ end
+ scan(specification .. '/',"")
+ files.__files__, files.__directories__, files.__remappings__ = n, m, r
+ if trace_locating then
+ report_resolvers("%s files found on %s directories with %s uppercase remappings",n,m,r)
+ end
+ return files
end
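-- A sketch of the resulting table (the scanned tree is hypothetical):
-- file names map to the directory relative to the scanned root, or to
-- a table of directories when a name occurs more than once; mixed case
-- names get an extra remap: entry.

local files = resolvers.scan_files("/usr/share/texmf/tex/context")
-- files["__path__"]     -> "/usr/share/texmf/tex/context"
-- files["__files__"]    -> number of files seen (__directories__ and
--                          __remappings__ hold the other two counters)
-- files["context.mkiv"] -> "base" (for instance), meaning it was found
--                          in <root>/base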
-commands.resettimer()
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-log'] = {
+if not modules then modules = { } end modules ['data-env'] = {
version = 1.001,
- comment = "companion to trac-log.mkiv",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
--- this is old code that needs an overhaul
+local formats = { } resolvers.formats = formats
+local suffixes = { } resolvers.suffixes = suffixes
+local dangerous = { } resolvers.dangerous = dangerous
+local suffixmap = { } resolvers.suffixmap = suffixmap
+local alternatives = { } resolvers.alternatives = alternatives
+
+formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' }
+formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' }
+formats['fmt'] = 'TEXFORMATS' suffixes['fmt'] = { 'fmt' }
+formats['map'] = 'TEXFONTMAPS' suffixes['map'] = { 'map' }
+formats['mp'] = 'MPINPUTS' suffixes['mp'] = { 'mp' }
+formats['ocp'] = 'OCPINPUTS' suffixes['ocp'] = { 'ocp' }
+formats['ofm'] = 'OFMFONTS' suffixes['ofm'] = { 'ofm', 'tfm' }
+formats['otf'] = 'OPENTYPEFONTS' suffixes['otf'] = { 'otf' }
+formats['opl'] = 'OPLFONTS' suffixes['opl'] = { 'opl' }
+formats['otp'] = 'OTPINPUTS' suffixes['otp'] = { 'otp' }
+formats['ovf'] = 'OVFFONTS' suffixes['ovf'] = { 'ovf', 'vf' }
+formats['ovp'] = 'OVPFONTS' suffixes['ovp'] = { 'ovp' }
+formats['tex'] = 'TEXINPUTS' suffixes['tex'] = { 'tex' }
+formats['tfm'] = 'TFMFONTS' suffixes['tfm'] = { 'tfm' }
+formats['ttf'] = 'TTFONTS' suffixes['ttf'] = { 'ttf', 'ttc', 'dfont' }
+formats['pfb'] = 'T1FONTS' suffixes['pfb'] = { 'pfb', 'pfa' }
+formats['vf'] = 'VFFONTS' suffixes['vf'] = { 'vf' }
+formats['fea'] = 'FONTFEATURES' suffixes['fea'] = { 'fea' }
+formats['cid'] = 'FONTCIDMAPS' suffixes['cid'] = { 'cid', 'cidmap' }
+formats['texmfscripts'] = 'TEXMFSCRIPTS' suffixes['texmfscripts'] = { 'rb', 'pl', 'py' }
+formats['lua'] = 'LUAINPUTS' suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' }
+formats['lib'] = 'CLUAINPUTS' suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' }
---~ io.stdout:setvbuf("no")
---~ io.stderr:setvbuf("no")
-
-local write_nl, write = texio.write_nl or print, texio.write or io.write
-local format, gmatch = string.format, string.gmatch
-local texcount = tex and tex.count
+-- backward compatible ones
-if texlua then
- write_nl = print
- write = io.write
-end
+alternatives['map files'] = 'map'
+alternatives['enc files'] = 'enc'
+alternatives['cid maps'] = 'cid' -- great, why no cid files
+alternatives['font feature files'] = 'fea' -- and fea files here
+alternatives['opentype fonts'] = 'otf'
+alternatives['truetype fonts'] = 'ttf'
+alternatives['truetype collections'] = 'ttc'
+alternatives['truetype dictionary'] = 'dfont'
+alternatives['type1 fonts'] = 'pfb'
--[[ldx--
-<p>This is a prelude to a more extensive logging module. For the sake
-of parsing log files, in addition to the standard logging we will
-provide an <l n='xml'/> structured file. Actually, any logging that
-is hooked into callbacks will be \XML\ by default.</p>
+<p>If you wondered about some of the previous mappings, how about
+the next bunch:</p>
--ldx]]--
-logs = logs or { }
-logs.xml = logs.xml or { }
-logs.tex = logs.tex or { }
+-- kpse specific ones (a few omitted) .. we only add them for locating
+-- files that we don't use anyway
+
+formats['base'] = 'MFBASES' suffixes['base'] = { 'base', 'bas' }
+formats['bib'] = '' suffixes['bib'] = { 'bib' }
+formats['bitmap font'] = '' suffixes['bitmap font'] = { }
+formats['bst'] = '' suffixes['bst'] = { 'bst' }
+formats['cmap files'] = 'CMAPFONTS' suffixes['cmap files'] = { 'cmap' }
+formats['cnf'] = '' suffixes['cnf'] = { 'cnf' }
+formats['cweb'] = '' suffixes['cweb'] = { 'w', 'web', 'ch' }
+formats['dvips config'] = '' suffixes['dvips config'] = { }
+formats['gf'] = '' suffixes['gf'] = { '<resolution>gf' }
+formats['graphic/figure'] = '' suffixes['graphic/figure'] = { 'eps', 'epsi' }
+formats['ist'] = '' suffixes['ist'] = { 'ist' }
+formats['lig files'] = 'LIGFONTS' suffixes['lig files'] = { 'lig' }
+formats['ls-R'] = '' suffixes['ls-R'] = { }
+formats['mem'] = 'MPMEMS' suffixes['mem'] = { 'mem' }
+formats['MetaPost support'] = '' suffixes['MetaPost support'] = { }
+formats['mf'] = 'MFINPUTS' suffixes['mf'] = { 'mf' }
+formats['mft'] = '' suffixes['mft'] = { 'mft' }
+formats['misc fonts'] = '' suffixes['misc fonts'] = { }
+formats['other text files'] = '' suffixes['other text files'] = { }
+formats['other binary files'] = '' suffixes['other binary files'] = { }
+formats['pdftex config'] = 'PDFTEXCONFIG' suffixes['pdftex config'] = { }
+formats['pk'] = '' suffixes['pk'] = { '<resolution>pk' }
+formats['PostScript header'] = 'TEXPSHEADERS' suffixes['PostScript header'] = { 'pro' }
+formats['sfd'] = 'SFDFONTS' suffixes['sfd'] = { 'sfd' }
+formats['TeX system documentation'] = '' suffixes['TeX system documentation'] = { }
+formats['TeX system sources'] = '' suffixes['TeX system sources'] = { }
+formats['Troff fonts'] = '' suffixes['Troff fonts'] = { }
+formats['type42 fonts'] = 'T42FONTS' suffixes['type42 fonts'] = { }
+formats['web'] = '' suffixes['web'] = { 'web', 'ch' }
+formats['web2c files'] = 'WEB2C' suffixes['web2c files'] = { }
+formats['fontconfig files'] = 'FONTCONFIG_PATH' suffixes['fontconfig files'] = { } -- not unique
---[[ldx--
-<p>This looks pretty ugly but we need to speed things up a bit.</p>
---ldx]]--
+alternatives['subfont definition files'] = 'sfd'
-logs.moreinfo = [[
-more information about ConTeXt and the tools that come with it can be found at:
+-- A few accessors, mostly for the command line tools.
-maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
-wiki : http://contextgarden.net
-]]
+function resolvers.suffix_of_format(str)
+ local s = suffixes[str]
+ return s and s[1] or ""
+end
-logs.levels = {
- ['error'] = 1,
- ['warning'] = 2,
- ['info'] = 3,
- ['debug'] = 4,
-}
+function resolvers.suffixes_of_format(str)
+ return suffixes[str] or { }
+end
-logs.functions = {
- 'report', 'start', 'stop', 'push', 'pop', 'line', 'direct',
- 'start_run', 'stop_run',
- 'start_page_number', 'stop_page_number',
- 'report_output_pages', 'report_output_log',
- 'report_tex_stat', 'report_job_stat',
- 'show_open', 'show_close', 'show_load',
-}
+-- As we don't register additional suffixes anyway, we might as well
+-- freeze the reverse map here.
-logs.tracers = {
-}
+for name, suffixlist in next, suffixes do
+ for i=1,#suffixlist do
+ suffixmap[suffixlist[i]] = name
+ end
+end
-logs.level = 0
-logs.mode = string.lower((os.getenv("MTX.LOG.MODE") or os.getenv("MTX_LOG_MODE") or "tex"))
+setmetatable(suffixes, { __newindex = function(suffixes,name,suffixlist)
+ rawset(suffixes,name,suffixlist)
+ suffixes[name] = suffixlist
+ for i=1,#suffixlist do
+ suffixmap[suffixlist[i]] = name
+ end
+end } )
-function logs.set_level(level)
- logs.level = logs.levels[level] or level
+for name, format in next, formats do
+ dangerous[name] = true
end
-function logs.set_method(method)
- for _, v in next, logs.functions do
- logs[v] = logs[method][v] or function() end
- end
+-- because vf searching is somewhat dangerous, we want to prevent
+-- too liberal searching esp because we do a lookup on the current
+-- path anyway; only tex (or any) is safe
+
+dangerous.tex = nil
+
+
+-- more helpers
+
+function resolvers.format_of_var(str)
+ return formats[str] or formats[alternatives[str]] or ''
end
--- tex logging
+function resolvers.format_of_suffix(str) -- of file
+ return suffixmap[file.extname(str)] or 'tex'
+end
-function logs.tex.report(category,fmt,...) -- new
- if fmt then
- write_nl(category .. " | " .. format(fmt,...))
- else
- write_nl(category .. " |")
- end
+function resolvers.variable_of_format(str)
+ return formats[str] or formats[alternatives[str]] or ''
end
-function logs.tex.line(fmt,...) -- new
- if fmt then
- write_nl(format(fmt,...))
- else
- write_nl("")
+function resolvers.var_of_format_or_suffix(str)
+ local v = formats[str]
+ if v then
+ return v
+ end
+ v = formats[alternatives[str]]
+ if v then
+ return v
end
+ v = suffixmap[file.extname(str)]
+ if v then
+ return formats[v]
+ end
+ return ''
end
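-- A few lookups as a sketch of how these accessors relate:

resolvers.format_of_suffix("somefont.otf")     -- "otf" (via suffixmap)
resolvers.variable_of_format("otf")            -- "OPENTYPEFONTS"
resolvers.variable_of_format("opentype fonts") -- "OPENTYPEFONTS" too, via alternatives
resolvers.suffix_of_format("ofm")              -- "ofm", the first entry of { 'ofm', 'tfm' }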
---~ function logs.tex.start_page_number()
---~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
---~ if real > 0 then
---~ if user > 0 then
---~ if sub > 0 then
---~ write(format("[%s.%s.%s",real,user,sub))
---~ else
---~ write(format("[%s.%s",real,user))
---~ end
---~ else
---~ write(format("[%s",real))
---~ end
---~ else
---~ write("[-")
---~ end
---~ end
-
---~ function logs.tex.stop_page_number()
---~ write("]")
---~ end
-local real, user, sub
-function logs.tex.start_page_number()
- real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
-end
+end -- of closure
-function logs.tex.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub)
- else
- logs.report("pages", "flushing realpage %s, userpage %s",real,user)
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-tmp'] = {
+ version = 1.100,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+<p>This module deals with caching data. It sets up the paths and
+implements loaders and savers for tables. Best is to set the
+following variable. When not set, the usual paths will be
+checked. Personally I prefer the (user's) temporary path.</p>
+
+<code>
+TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
+</code>
+
+<p>Currently we do no locking when we write files. This is no real
+problem because most caching involves fonts and the chance of them
+being written at the same time is small. We also need to extend
+luatools with a recache feature.</p>
+--ldx]]--
+
+local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local mkdirs, isdir = dir.mkdirs, lfs.isdir
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
+
+local report_cache = logs.new("cache")
+
+local report_resolvers = logs.new("resolvers")
+
+caches = caches or { }
+
+caches.base = caches.base or "luatex-cache"
+caches.more = caches.more or "context"
+caches.direct = false -- true is faster but may need huge amounts of memory
+caches.tree = false
+caches.force = true
+caches.ask = false
+caches.defaults = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
+
+local writable, readables, usedreadables = nil, { }, { }
+
+-- we could use a metatable for writable and readable but not yet
+
+local function identify()
+ -- Combining the loops makes it messy. First we check the format cache path
+ -- and when the last component is not present we try to create it.
+ local texmfcaches = resolvers.clean_path_list("TEXMFCACHE")
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath = texmfcaches[k]
+ if cachepath ~= "" then
+ cachepath = resolvers.clean_path(cachepath)
+ cachepath = file.collapse_path(cachepath)
+ local valid = isdir(cachepath)
+ if valid then
+ if file.isreadable(cachepath) then
+ readables[#readables+1] = cachepath
+ if not writable and file.iswritable(cachepath) then
+ writable = cachepath
+ end
+ end
+ elseif not writable and caches.force then
+ local cacheparent = file.dirname(cachepath)
+ if file.iswritable(cacheparent) then
+ if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
+ mkdirs(cachepath)
+ if isdir(cachepath) and file.iswritable(cachepath) then
+ report_cache("created: %s",cachepath)
+ writable = cachepath
+ readables[#readables+1] = cachepath
+ end
+ end
+ end
+ end
end
- else
- logs.report("pages", "flushing realpage %s",real)
+ end
+ end
+ -- As a last resort we check some temporary paths but this time we don't
+ -- create them.
+ local texmfcaches = caches.defaults
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath = texmfcaches[k]
+ cachepath = resolvers.getenv(cachepath)
+ if cachepath ~= "" then
+ cachepath = resolvers.clean_path(cachepath)
+ local valid = isdir(cachepath)
+ if valid and file.isreadable(cachepath) then
+ if not writable and file.iswritable(cachepath) then
+ readables[#readables+1] = cachepath
+ writable = cachepath
+ break
+ end
+ end
+ end
+ end
+ end
+ -- Some extra checking. If we have no writable or readable path then we simply
+ -- quit.
+ if not writable then
+ report_cache("fatal error: there is no valid writable cache path defined")
+ os.exit()
+ elseif #readables == 0 then
+ report_cache("fatal error: there is no valid readable cache path defined")
+ os.exit()
+ end
+ -- why here
+ writable = dir.expand_name(resolvers.clean_path(writable)) -- just in case
+ -- moved here
+ local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree
+ if tree then
+ caches.tree = tree
+ writable = mkdirs(writable,base,more,tree)
+ for i=1,#readables do
+ readables[i] = file.join(readables[i],base,more,tree)
end
else
- logs.report("pages", "flushing page")
+ writable = mkdirs(writable,base,more)
+ for i=1,#readables do
+ readables[i] = file.join(readables[i],base,more)
+ end
end
- io.flush()
+ -- end
+ if trace_cache then
+ for i=1,#readables do
+ report_cache("using readable path '%s' (order %s)",readables[i],i)
+ end
+ report_cache("using writable path '%s'",writable)
+ end
+ identify = function()
+ return writable, readables
+ end
+ return writable, readables
end
-logs.tex.report_job_stat = statistics.show_job_stat
-
--- xml logging
-
-function logs.xml.report(category,fmt,...) -- new
- if fmt then
- write_nl(format("<r category='%s'>%s</r>",category,format(fmt,...)))
+function caches.usedpaths()
+ local writable, readables = identify()
+ if #readables > 1 then
+ local result = { }
+ for i=1,#readables do
+ local readable = readables[i]
+ if usedreadables[i] or readable == writable then
+ result[#result+1] = format("readable: '%s' (order %s)",readable,i)
+ end
+ end
+ result[#result+1] = format("writable: '%s'",writable)
+ return result
else
- write_nl(format("<r category='%s'/>",category))
+ return writable
end
end
-function logs.xml.line(fmt,...) -- new
- if fmt then
- write_nl(format("<r>%s</r>",format(fmt,...)))
+
+function caches.configfiles()
+ return table.concat(resolvers.instance.specification,";")
+end
+
+function caches.hashed(tree)
+ return md5.hex(gsub(lower(tree),"[\\\/]+","/"))
+end
+
+function caches.treehash()
+ local tree = caches.configfiles()
+ if not tree or tree == "" then
+ return false
else
- write_nl("<r/>")
+ return caches.hashed(tree)
end
end
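-- A sketch of the hashing (the paths are made up): the configuration
-- specification is lowercased and slash normalized before being fed to
-- md5, so every distinct set of configuration files ends up in its own
-- cache subdirectory.

local h = caches.hashed([[C:\TeX\texmf-local;C:\TeX\texmf]])
-- h is the 32 character hex md5 of "c:/tex/texmf-local;c:/tex/texmf"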
-function logs.xml.start() if logs.level > 0 then tw("<%s>" ) end end
-function logs.xml.stop () if logs.level > 0 then tw("</%s>") end end
-function logs.xml.push () if logs.level > 0 then tw("<!-- ") end end
-function logs.xml.pop () if logs.level > 0 then tw(" -->" ) end end
+local r_cache, w_cache = { }, { } -- normally w is in r but who cares
-function logs.xml.start_run()
- write_nl("<?xml version='1.0' standalone='yes'?>")
- write_nl("<job>") -- xmlns='www.pragma-ade.com/luatex/schemas/context-job.rng'
- write_nl("")
+local function getreadablepaths(...) -- we can optimize this as we have at most 2 tags
+ local tags = { ... }
+ local hash = concat(tags,"/")
+ local done = r_cache[hash]
+ if not done then
+ local writable, readables = identify() -- exit if not found
+ if #tags > 0 then
+ done = { }
+ for i=1,#readables do
+ done[i] = file.join(readables[i],...)
+ end
+ else
+ done = readables
+ end
+ r_cache[hash] = done
+ end
+ return done
end
-function logs.xml.stop_run()
- write_nl("</job>")
+local function getwritablepath(...)
+ local tags = { ... }
+ local hash = concat(tags,"/")
+ local done = w_cache[hash]
+ if not done then
+ local writable, readables = identify() -- exit if not found
+ if #tags > 0 then
+ done = mkdirs(writable,...)
+ else
+ done = writable
+ end
+ w_cache[hash] = done
+ end
+ return done
end
-function logs.xml.start_page_number()
- write_nl(format("<p real='%s' page='%s' sub='%s'", texcount.realpageno, texcount.userpageno, texcount.subpageno))
-end
+caches.getreadablepaths = getreadablepaths
+caches.getwritablepath = getwritablepath
-function logs.xml.stop_page_number()
- write("/>")
- write_nl("")
+function caches.getfirstreadablefile(filename,...)
+ local rd = getreadablepaths(...)
+ for i=1,#rd do
+ local path = rd[i]
+ local fullname = file.join(path,filename)
+ if file.isreadable(fullname) then
+ usedreadables[i] = true
+ return fullname, path
+ end
+ end
+ return caches.setfirstwritablefile(filename,...)
end
-function logs.xml.report_output_pages(p,b)
- write_nl(format("<v k='pages' v='%s'/>", p))
- write_nl(format("<v k='bytes' v='%s'/>", b))
- write_nl("")
+function caches.setfirstwritablefile(filename,...)
+ local wr = getwritablepath(...)
+ local fullname = file.join(wr,filename)
+ return fullname, wr
end
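-- A usage sketch (the file name is just a placeholder): a cached file
-- is first looked up in the readable trees; when none of them has it,
-- the location in the writable tree is returned so the caller can
-- create it there.

local fullname, path = caches.getfirstreadablefile("whatever.lua","trees")
-- fullname is <readable cache>/trees/whatever.lua when such a file
-- exists, otherwise <writable cache>/trees/whatever.lua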
-function logs.xml.report_output_log()
+function caches.define(category,subcategory) -- for old times' sake
+ return function()
+ return getwritablepath(category,subcategory)
+ end
end
-function logs.xml.report_tex_stat(k,v)
- texiowrite_nl("log","<v k='"..k.."'>"..tostring(v).."</v>")
+function caches.setluanames(path,name)
+ return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
end
-local level = 0
-
-function logs.xml.show_open(name)
- level = level + 1
- texiowrite_nl(format("<f l='%s' n='%s'>",level,name))
+function caches.loaddata(readables,name)
+ if type(readables) == "string" then
+ readables = { readables }
+ end
+ for i=1,#readables do
+ local path = readables[i]
+ local tmaname, tmcname = caches.setluanames(path,name)
+ local loader = loadfile(tmcname) or loadfile(tmaname)
+ if loader then
+ loader = loader()
+ collectgarbage("step")
+ return loader
+ end
+ end
+ return false
end
-function logs.xml.show_close(name)
- texiowrite("</f> ")
- level = level - 1
+function caches.iswritable(filepath,filename)
+ local tmaname, tmcname = caches.setluanames(filepath,filename)
+ return file.iswritable(tmaname)
end
-function logs.xml.show_load(name)
- texiowrite_nl(format("<f l='%s' n='%s'/>",level+1,name))
+function caches.savedata(filepath,filename,data,raw)
+ local tmaname, tmcname = caches.setluanames(filepath,filename)
+ local reduce, simplify = true, true
+ if raw then
+ reduce, simplify = false, false
+ end
+ data.cache_uuid = os.uuid()
+ if caches.direct then
+ file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex
+ else
+ table.tofile(tmaname, data,'return',false,true,false) -- maybe not the last true
+ end
+ local cleanup = resolvers.boolean_variable("PURGECACHE", false)
+ local strip = resolvers.boolean_variable("LUACSTRIP", true)
+ utils.lua.compile(tmaname, tmcname, cleanup, strip)
end
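-- A save/load round trip as a sketch (category, name and data are made
-- up): savedata writes a .tma table dump plus a byte compiled .tmc next
-- to it, and loaddata prefers the compiled file when reading back.

local path = caches.getwritablepath("some-category","some-subcategory")
caches.savedata(path,"example",{ value = 123 })
local data = caches.loaddata(path,"example") -- { value = 123, cache_uuid = "..." }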
---
+-- moved from data-res:
-local name, banner = 'report', 'context'
+local content_state = { }
-local function report(category,fmt,...)
- if fmt then
- write_nl(format("%s | %s: %s",name,category,format(fmt,...)))
- elseif category then
- write_nl(format("%s | %s",name,category))
- else
- write_nl(format("%s |",name))
- end
+function caches.contentstate()
+ return content_state or { }
end
-local function simple(fmt,...)
- if fmt then
- write_nl(format("%s | %s",name,format(fmt,...)))
- else
- write_nl(format("%s |",name))
+function caches.loadcontent(cachename,dataname)
+ local name = caches.hashed(cachename)
+ local full, path = caches.getfirstreadablefile(name ..".lua","trees")
+ local filename = file.join(path,name)
+ local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
+ if blob then
+ local data = blob()
+ if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then
+ content_state[#content_state+1] = data.uuid
+ if trace_locating then
+ report_resolvers("loading '%s' for '%s' from '%s'",dataname,cachename,filename)
+ end
+ return data.content
+ elseif trace_locating then
+ report_resolvers("skipping '%s' for '%s' from '%s'",dataname,cachename,filename)
+ end
+ elseif trace_locating then
+ report_resolvers("skipping '%s' for '%s' from '%s'",dataname,cachename,filename)
end
end
-function logs.setprogram(_name_,_banner_,_verbose_)
- name, banner = _name_, _banner_
- if _verbose_ then
- trackers.enable("resolvers.locating")
- end
- logs.set_method("tex")
- logs.report = report -- also used in libraries
- logs.simple = simple -- only used in scripts !
- if utils then
- utils.report = simple
+function caches.collapsecontent(content)
+ for k, v in next, content do
+ if type(v) == "table" and #v == 1 then
+ content[k] = v[1]
+ end
end
- logs.verbose = _verbose_
end
-function logs.setverbose(what)
- if what then
- trackers.enable("resolvers.locating")
- else
- trackers.disable("resolvers.locating")
+function caches.savecontent(cachename,dataname,content)
+ local name = caches.hashed(cachename)
+ local full, path = caches.setfirstwritablefile(name ..".lua","trees")
+ local filename = file.join(path,name) -- is full
+ local luaname, lucname = filename .. ".lua", filename .. ".luc"
+ if trace_locating then
+ report_resolvers("preparing '%s' for '%s'",dataname,cachename)
+ end
+ local data = {
+ type = dataname,
+ root = cachename,
+ version = resolvers.cacheversion,
+ date = os.date("%Y-%m-%d"),
+ time = os.date("%H:%M:%S"),
+ content = content,
+ uuid = os.uuid(),
+ }
+ local ok = io.savedata(luaname,table.serialize(data,true))
+ if ok then
+ if trace_locating then
+ report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
+ end
+ if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip
+ if trace_locating then
+ report_resolvers("'%s' compiled to '%s'",dataname,lucname)
+ end
+ return true
+ else
+ if trace_locating then
+ report_resolvers("compiling failed for '%s', deleting file '%s'",dataname,lucname)
+ end
+ os.remove(lucname)
+ end
+ elseif trace_locating then
+ report_resolvers("unable to save '%s' in '%s' (access error)",dataname,luaname)
end
- logs.verbose = what or false
end
-function logs.extendbanner(_banner_,_verbose_)
- banner = banner .. " | ".. _banner_
- if _verbose_ ~= nil then
- logs.setverbose(what)
- end
-end
-logs.verbose = false
-logs.report = logs.tex.report
-logs.simple = logs.tex.report
-function logs.reportlines(str) -- todo: <lines></lines>
- for line in gmatch(str,"(.-)[\n\r]") do
- logs.report(line)
- end
-end
-function logs.reportline() -- for scripts too
- logs.report()
-end
+end -- of closure
-logs.simpleline = logs.reportline
+do -- create closure to overcome 200 locals limit
-function logs.reportbanner() -- for scripts too
- logs.report(banner)
-end
+if not modules then modules = { } end modules ['data-met'] = {
+ version = 1.100,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
-function logs.help(message,option)
- logs.reportbanner()
- logs.reportline()
- logs.reportlines(message)
- local moreinfo = logs.moreinfo or ""
- if moreinfo ~= "" and option ~= "nomoreinfo" then
- logs.reportline()
- logs.reportlines(moreinfo)
+local find = string.find
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+resolvers.locators = { notfound = { nil } } -- locate databases
+resolvers.hashers = { notfound = { nil } } -- load databases
+resolvers.generators = { notfound = { nil } } -- generate databases
+
+function resolvers.splitmethod(filename)
+ if not filename then
+ return { } -- safeguard
+ elseif type(filename) == "table" then
+ return filename -- already split
+ elseif not find(filename,"://") then
+ return { scheme="file", path = filename, original = filename } -- quick hack
+ else
+ return url.hashed(filename)
end
end
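-- A sketch of the two cases (the url is just an example):

resolvers.splitmethod("texmf.cnf")
-- { scheme = "file", path = "texmf.cnf", original = "texmf.cnf" }
resolvers.splitmethod("http://example.org/somefile.tex")
-- the table that url.hashed returns, with scheme "http"; methodhandler
-- below uses that scheme to pick resolvers.<what>.http when such a
-- handler is defined and falls back to the tex handler otherwise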
-logs.set_level('error')
-logs.set_method('tex')
-
-function logs.system(whereto,process,jobname,category,...)
- for i=1,10 do
- local f = io.open(whereto,"a")
- if f then
- f:write(format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...)))
- f:close()
- break
- else
- sleep(0.1)
+function resolvers.methodhandler(what, filename, filetype) -- ...
+ filename = file.collapse_path(filename)
+ local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
+ local scheme = specification.scheme
+ local resolver = resolvers[what]
+ if resolver[scheme] then
+ if trace_locating then
+ report_resolvers("handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
end
+ return resolver[scheme](filename,filetype)
+ else
+ return resolver.tex(filename,filetype) -- todo: specification
end
end
---~ local syslogname = "oeps.xxx"
---~
---~ for i=1,10 do
---~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
---~ end
-
-function logs.fatal(where,...)
- logs.report(where,"fatal error: %s, aborting now",format(...))
- os.exit()
-end
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-inp'] = {
+if not modules then modules = { } end modules ['data-res'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -8111,70 +9499,45 @@ if not modules then modules = { } end modules ['data-inp'] = {
license = "see context related readme files",
}
--- After a few years using the code the large luat-inp.lua file
--- has been split up a bit. In the process some functionality was
--- dropped:
---
--- * support for reading lsr files
--- * selective scanning (subtrees)
--- * some public auxiliary functions were made private
---
--- TODO: os.getenv -> os.env[]
--- TODO: instances.[hashes,cnffiles,configurations,522]
--- TODO: check escaping in find etc, too much, too slow
-
--- This lib is multi-purpose and can be loaded again later on so that
--- additional functionality becomes available. We will split this
--- module in components once we're done with prototyping. This is the
--- first code I wrote for LuaTeX, so it needs some cleanup. Before changing
--- something in this module one can best check with Taco or Hans first; there
--- is some nasty trickery going on that relates to traditional kpse support.
-
--- To be considered: hash key lowercase, first entry in table filename
--- (any case), rest paths (so no need for optimization). Or maybe a
--- separate table that matches lowercase names to mixed case when
--- present. In that case the lower() cases can go away. I will do that
--- only when we run into problems with names ... well ... Iwona-Regular.
+-- In practice we will work within one tds tree, but I want to keep
+-- the option open to build tools that look at multiple trees, which is
+-- why we keep the tree specific data in a table. We used to pass the
+-- instance but for practical purposes we now avoid this and use an
+-- instance variable. We always have one instance active (sort of global).
--- Beware, loading and saving is overloaded in luat-tmp!
+-- todo: cache:/// home:///
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
local next, type = next, type
-local lpegmatch = lpeg.match
-local trace_locating, trace_detail, trace_expansions = false, false, false
+local lpegP, lpegS, lpegR, lpegC, lpegCc, lpegCs, lpegCt = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-trackers.register("resolvers.locating", function(v) trace_locating = v end)
-trackers.register("resolvers.details", function(v) trace_detail = v end)
-trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo
+local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
+local collapse_path = file.collapse_path
-if not resolvers then
- resolvers = {
- suffixes = { },
- formats = { },
- dangerous = { },
- suffixmap = { },
- alternatives = { },
- locators = { }, -- locate databases
- hashers = { }, -- load databases
- generators = { }, -- generate databases
- }
-end
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
+
+local report_resolvers = logs.new("resolvers")
-local resolvers = resolvers
+local expanded_path_from_list = resolvers.expanded_path_from_list
+local checked_variable = resolvers.checked_variable
+local split_configuration_path = resolvers.split_configuration_path
-resolvers.locators .notfound = { nil }
-resolvers.hashers .notfound = { nil }
-resolvers.generators.notfound = { nil }
+local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.setenv, os.getenv
resolvers.cacheversion = '1.0.1'
-resolvers.cnfname = 'texmf.cnf'
-resolvers.luaname = 'texmfcnf.lua'
-resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~'
-resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}'
+resolvers.configbanner = ''
+resolvers.homedir = environment.homedir
+resolvers.criticalvars = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
+resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}' -- rubbish path
+resolvers.luacnfname = 'texmfcnf.lua'
+resolvers.luacnfstate = "unknown"
-local dummy_path_expr = "^!*unset/*$"
+local unset_variable = "unset"
local formats = resolvers.formats
local suffixes = resolvers.suffixes
@@ -8182,104 +9545,12 @@ local dangerous = resolvers.dangerous
local suffixmap = resolvers.suffixmap
local alternatives = resolvers.alternatives
-formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' }
-formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' }
-formats['fmt'] = 'TEXFORMATS' suffixes['fmt'] = { 'fmt' }
-formats['map'] = 'TEXFONTMAPS' suffixes['map'] = { 'map' }
-formats['mp'] = 'MPINPUTS' suffixes['mp'] = { 'mp' }
-formats['ocp'] = 'OCPINPUTS' suffixes['ocp'] = { 'ocp' }
-formats['ofm'] = 'OFMFONTS' suffixes['ofm'] = { 'ofm', 'tfm' }
-formats['otf'] = 'OPENTYPEFONTS' suffixes['otf'] = { 'otf' } -- 'ttf'
-formats['opl'] = 'OPLFONTS' suffixes['opl'] = { 'opl' }
-formats['otp'] = 'OTPINPUTS' suffixes['otp'] = { 'otp' }
-formats['ovf'] = 'OVFFONTS' suffixes['ovf'] = { 'ovf', 'vf' }
-formats['ovp'] = 'OVPFONTS' suffixes['ovp'] = { 'ovp' }
-formats['tex'] = 'TEXINPUTS' suffixes['tex'] = { 'tex' }
-formats['tfm'] = 'TFMFONTS' suffixes['tfm'] = { 'tfm' }
-formats['ttf'] = 'TTFONTS' suffixes['ttf'] = { 'ttf', 'ttc', 'dfont' }
-formats['pfb'] = 'T1FONTS' suffixes['pfb'] = { 'pfb', 'pfa' }
-formats['vf'] = 'VFFONTS' suffixes['vf'] = { 'vf' }
-
-formats['fea'] = 'FONTFEATURES' suffixes['fea'] = { 'fea' }
-formats['cid'] = 'FONTCIDMAPS' suffixes['cid'] = { 'cid', 'cidmap' }
-
-formats ['texmfscripts'] = 'TEXMFSCRIPTS' -- new
-suffixes['texmfscripts'] = { 'rb', 'pl', 'py' } -- 'lua'
-
-formats ['lua'] = 'LUAINPUTS' -- new
-suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' }
-
--- backward compatible ones
-
-alternatives['map files'] = 'map'
-alternatives['enc files'] = 'enc'
-alternatives['cid maps'] = 'cid' -- great, why no cid files
-alternatives['font feature files'] = 'fea' -- and fea files here
-alternatives['opentype fonts'] = 'otf'
-alternatives['truetype fonts'] = 'ttf'
-alternatives['truetype collections'] = 'ttc'
-alternatives['truetype dictionary'] = 'dfont'
-alternatives['type1 fonts'] = 'pfb'
-
--- obscure ones
-
-formats ['misc fonts'] = ''
-suffixes['misc fonts'] = { }
-
-formats ['sfd'] = 'SFDFONTS'
-suffixes ['sfd'] = { 'sfd' }
-alternatives['subfont definition files'] = 'sfd'
-
--- lib paths
-
-formats ['lib'] = 'CLUAINPUTS' -- new (needs checking)
-suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' }
-
--- In practice we will work within one tds tree, but i want to keep
--- the option open to build tools that look at multiple trees, which is
--- why we keep the tree specific data in a table. We used to pass the
--- instance but for practical pusposes we now avoid this and use a
--- instance variable.
-
--- here we catch a few new thingies (todo: add these paths to context.tmf)
---
--- FONTFEATURES = .;$TEXMF/fonts/fea//
--- FONTCIDMAPS = .;$TEXMF/fonts/cid//
-
--- we always have one instance active
-
resolvers.instance = resolvers.instance or nil -- the current one (slow access)
-local instance = resolvers.instance or nil -- the current one (fast access)
+local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.newinstance()
- -- store once, freeze and faster (once reset we can best use
- -- instance.environment) maybe better have a register suffix
- -- function
-
- for k, v in next, suffixes do
- for i=1,#v do
- local vi = v[i]
- if vi then
- suffixmap[vi] = k
- end
- end
- end
-
- -- because vf searching is somewhat dangerous, we want to prevent
- -- too liberal searching esp because we do a lookup on the current
- -- path anyway; only tex (or any) is safe
-
- for k, v in next, formats do
- dangerous[k] = true
- end
- dangerous.tex = nil
-
- -- the instance
-
local newinstance = {
- rootpath = '',
- treepath = '',
progname = 'context',
engine = 'luatex',
format = '',
@@ -8287,26 +9558,19 @@ function resolvers.newinstance()
variables = { },
expansions = { },
files = { },
- remap = { },
- configuration = { },
- setup = { },
+ setups = { },
order = { },
found = { },
foundintrees = { },
- kpsevars = { },
+ origins = { },
hashes = { },
- cnffiles = { },
- luafiles = { },
+ specification = { },
lists = { },
remember = true,
diskcache = true,
renewcache = false,
- scandisk = true,
- cachepath = nil,
loaderror = false,
- sortdata = false,
savelists = true,
- cleanuppaths = true,
allresults = false,
pattern = nil, -- lists
data = { }, -- only for loading
@@ -8316,8 +9580,8 @@ function resolvers.newinstance()
local ne = newinstance.environment
- for k,v in next, os.env do
- ne[k] = resolvers.bare_variable(v)
+ for k, v in next, osenv do
+ ne[upper(k)] = checked_variable(v)
end
return newinstance
@@ -8339,91 +9603,68 @@ local function reset_hashes()
instance.found = { }
end
-local function check_configuration() -- not yet ok, no time for debugging now
- local ie, iv = instance.environment, instance.variables
- local function fix(varname,default)
- local proname = varname .. "." .. instance.progname or "crap"
- local p, v = ie[proname], ie[varname] or iv[varname]
- if not ((p and p ~= "") or (v and v ~= "")) then
- iv[varname] = default -- or environment?
- end
- end
- local name = os.name
- if name == "windows" then
- fix("OSFONTDIR", "c:/windows/fonts//")
- elseif name == "macosx" then
- fix("OSFONTDIR", "$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
- else
- -- bad luck
- end
- fix("LUAINPUTS" , ".;$TEXINPUTS;$TEXMFSCRIPTS") -- no progname, hm
- -- this will go away some day
- fix("FONTFEATURES", ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
- fix("FONTCIDMAPS" , ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
- --
- fix("LUATEXLIBS" , ".;$TEXMF/luatex/lua//")
-end
-
-function resolvers.bare_variable(str) -- assumes str is a string
- return (gsub(str,"\s*([\"\']?)(.+)%1\s*", "%2"))
-end
-
-function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
- if n then
- trackers.disable("resolvers.*")
- trackers.enable("resolvers."..n)
+function resolvers.setenv(key,value)
+ if instance then
+ instance.environment[key] = value
+ ossetenv(key,value)
end
end
-resolvers.settrace(os.getenv("MTX_INPUT_TRACE"))
-
-function resolvers.osenv(key)
- local ie = instance.environment
- local value = ie[key]
- if value == nil then
- -- local e = os.getenv(key)
- local e = os.env[key]
- if e == nil then
- -- value = "" -- false
- else
- value = resolvers.bare_variable(e)
- end
- ie[key] = value
+function resolvers.getenv(key)
+ local value = instance.environment[key]
+ if value and value ~= "" then
+ return value
+ else
+ local e = osgetenv(key)
+ return e ~= nil and e ~= "" and checked_variable(e) or ""
end
- return value or ""
-end
-
-function resolvers.env(key)
- return instance.environment[key] or resolvers.osenv(key)
end
---
+resolvers.env = resolvers.getenv
local function expand_vars(lst) -- simple vars
- local variables, env = instance.variables, resolvers.env
+ local variables, getenv = instance.variables, resolvers.getenv
local function resolve(a)
- return variables[a] or env(a)
+ local va = variables[a] or ""
+ return (va ~= "" and va) or getenv(a) or ""
end
for k=1,#lst do
- lst[k] = gsub(lst[k],"%$([%a%d%_%-]+)",resolve)
+ local var = lst[k]
+ var = gsub(var,"%$([%a%d%_%-]+)",resolve)
+ var = gsub(var,";+",";")
+ var = gsub(var,";[!{}/\\]+;",";")
+ lst[k] = var
end
end
-local function expanded_var(var) -- simple vars
- local function resolve(a)
- return instance.variables[a] or resolvers.env(a)
+local function resolve(key)
+ local value = instance.variables[key]
+ if value and value ~= "" then
+ return value
+ end
+ local value = instance.environment[key]
+ if value and value ~= "" then
+ return value
end
- return (gsub(var,"%$([%a%d%_%-]+)",resolve))
+ local e = osgetenv(key)
+ return e ~= nil and e ~= "" and checked_variable(e) or ""
+end
+
+local function expanded_var(var) -- simple vars
+ var = gsub(var,"%$([%a%d%_%-]+)",resolve)
+ var = gsub(var,";+",";")
+ var = gsub(var,";[!{}/\\]+;",";")
+ return var
end
local function entry(entries,name)
- if name and (name ~= "") then
+ if name and name ~= "" then
name = gsub(name,'%$','')
local result = entries[name..'.'..instance.progname] or entries[name]
if result then
return result
else
- result = resolvers.env(name)
+ result = resolvers.getenv(name)
if result then
instance.variables[name] = result
resolvers.expand_variables()
@@ -8443,438 +9684,147 @@ local function is_entry(entries,name)
end
end
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
--- this one is better and faster, but it took me a while to realize
--- that this kind of replacement is cleaner than messy parsing and
--- fuzzy concatenating we can probably gain a bit with selectively
--- applying lpeg, but experiments with lpeg parsing this proved not to
--- work that well; the parsing is ok, but dealing with the resulting
--- table is a pain because we need to work inside-out recursively
-
-local function do_first(a,b)
- local t = { }
- for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_second(a,b)
- local t = { }
- for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_both(a,b)
- local t = { }
- for sa in gmatch(a,"[^,]+") do
- for sb in gmatch(b,"[^,]+") do
- t[#t+1] = sa .. sb
- end
- end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_three(a,b,c)
- return a .. b.. c
-end
-
-local function splitpathexpr(str, t, validate)
- -- no need for further optimization as it is only called a
- -- few times, we can use lpeg for the sub
- if trace_expansions then
- logs.report("fileio","expanding variable '%s'",str)
- end
- t = t or { }
- str = gsub(str,",}",",@}")
- str = gsub(str,"{,","{@,")
- -- str = "@" .. str .. "@"
- local ok, done
- while true do
- done = false
- while true do
- str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
- if ok > 0 then done = true else break end
- end
- str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
- if ok > 0 then done = true end
- if not done then break end
- end
- str = gsub(str,"[{}]", "")
- str = gsub(str,"@","")
- if validate then
- for s in gmatch(str,"[^,]+") do
- s = validate(s)
- if s then t[#t+1] = s end
- end
- else
- for s in gmatch(str,"[^,]+") do
- t[#t+1] = s
- end
- end
- if trace_expansions then
- for k=1,#t do
- logs.report("fileio","% 4i: %s",k,t[k])
- end
- end
- return t
-end
-
-local function expanded_path_from_list(pathlist) -- maybe not a list, just a path
- -- a previous version fed back into pathlist
- local newlist, ok = { }, false
- for k=1,#pathlist do
- if find(pathlist[k],"[{}]") then
- ok = true
- break
- end
- end
- if ok then
- local function validate(s)
- s = file.collapse_path(s)
- return s ~= "" and not find(s,dummy_path_expr) and s
- end
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- else
- for k=1,#pathlist do
- for p in gmatch(pathlist[k],"([^,]+)") do
- p = file.collapse_path(p)
- if p ~= "" then newlist[#newlist+1] = p end
- end
- end
- end
- return newlist
-end
-
--- we follow a rather traditional approach:
---
--- (1) texmf.cnf given in TEXMFCNF
--- (2) texmf.cnf searched in default variable
---
--- also we now follow the stupid route: if not set then just assume *one*
--- cnf file under texmf (i.e. distribution)
-
-local args = environment and environment.original_arguments or arg -- this needs a cleanup
-
-resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
-resolvers.ownbin = gsub(resolvers.ownbin,"\\","/")
-
-function resolvers.getownpath()
- local ownpath = resolvers.ownpath or os.selfdir
- if not ownpath or ownpath == "" or ownpath == "unset" then
- ownpath = args[-1] or arg[-1]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- if not ownpath or ownpath == "" then
- ownpath = args[-0] or arg[-0]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- end
- local binary = resolvers.ownbin
- if not ownpath or ownpath == "" then
- ownpath = ownpath and file.dirname(binary)
- end
- if not ownpath or ownpath == "" then
- if os.binsuffix ~= "" then
- binary = file.replacesuffix(binary,os.binsuffix)
- end
- for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
- local b = file.join(p,binary)
- if lfs.isfile(b) then
- -- we assume that after changing to the path the currentdir function
- -- resolves to the real location and use this side effect here; this
- -- trick is needed because on the mac installations use symlinks in the
- -- path instead of real locations
- local olddir = lfs.currentdir()
- if lfs.chdir(p) then
- local pp = lfs.currentdir()
- if trace_locating and p ~= pp then
- logs.report("fileio","following symlink '%s' to '%s'",p,pp)
- end
- ownpath = pp
- lfs.chdir(olddir)
- else
- if trace_locating then
- logs.report("fileio","unable to check path '%s'",p)
- end
- ownpath = p
- end
- break
- end
- end
- end
- if not ownpath or ownpath == "" then
- ownpath = "."
- logs.report("fileio","forcing fallback ownpath .")
- elseif trace_locating then
- logs.report("fileio","using ownpath '%s'",ownpath)
- end
- end
- resolvers.ownpath = ownpath
- function resolvers.getownpath()
- return resolvers.ownpath
- end
- return ownpath
-end
-
-local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" }
-
-local function identify_own()
- local ownpath = resolvers.getownpath() or dir.current()
- local ie = instance.environment
- if ownpath then
- if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end
- if resolvers.env('SELFAUTODIR') == "" then os.env['SELFAUTODIR'] = file.collapse_path(ownpath .. "/..") end
- if resolvers.env('SELFAUTOPARENT') == "" then os.env['SELFAUTOPARENT'] = file.collapse_path(ownpath .. "/../..") end
- else
- logs.report("fileio","error: unable to locate ownpath")
- os.exit()
- end
- if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end
- if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end
- if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end
+function resolvers.report_critical_variables()
if trace_locating then
- for i=1,#own_places do
- local v = own_places[i]
- logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown")
+ for i=1,#resolvers.criticalvars do
+ local v = resolvers.criticalvars[i]
+ report_resolvers("variable '%s' set to '%s'",v,resolvers.getenv(v) or "unknown")
end
+ report_resolvers()
end
- identify_own = function() end
+ resolvers.report_critical_variables = function() end
end
-function resolvers.identify_cnf()
- if #instance.cnffiles == 0 then
- -- fallback
- identify_own()
- -- the real search
- resolvers.expand_variables()
- local t = resolvers.split_path(resolvers.env('TEXMFCNF'))
- t = expanded_path_from_list(t)
- expand_vars(t) -- redundant
- local function locate(filename,list)
- for i=1,#t do
- local ti = t[i]
- local texmfcnf = file.collapse_path(file.join(ti,filename))
- if lfs.isfile(texmfcnf) then
- list[#list+1] = texmfcnf
- end
- end
- end
- locate(resolvers.luaname,instance.luafiles)
- locate(resolvers.cnfname,instance.cnffiles)
- end
-end
-
-local function load_cnf_file(fname)
- fname = resolvers.clean_path(fname)
- local lname = file.replacesuffix(fname,'lua')
- if lfs.isfile(lname) then
- local dname = file.dirname(fname) -- fname ?
- if not instance.configuration[dname] then
- resolvers.load_data(dname,'configuration',lname and file.basename(lname))
- instance.order[#instance.order+1] = instance.configuration[dname]
+local function identify_configuration_files()
+ local specification = instance.specification
+ if #specification == 0 then
+ local cnfspec = resolvers.getenv('TEXMFCNF')
+ if cnfspec == "" then
+ cnfspec = resolvers.luacnfspec
+ resolvers.luacnfstate = "default"
+ else
+ resolvers.luacnfstate = "environment"
end
- else
- f = io.open(fname)
- if f then
- if trace_locating then
- logs.report("fileio","loading configuration file %s", fname)
- end
- local line, data, n, k, v
- local dname = file.dirname(fname)
- if not instance.configuration[dname] then
- instance.configuration[dname] = { }
- instance.order[#instance.order+1] = instance.configuration[dname]
- end
- local data = instance.configuration[dname]
- while true do
- local line, n = f:read(), 0
- if line then
- while true do -- join lines
- line, n = gsub(line,"\\%s*$", "")
- if n > 0 then
- line = line .. f:read()
- else
- break
+ resolvers.report_critical_variables()
+ resolvers.expand_variables()
+ local cnfpaths = expanded_path_from_list(resolvers.split_path(cnfspec))
+ expand_vars(cnfpaths) --- hm
+ local luacnfname = resolvers.luacnfname
+ for i=1,#cnfpaths do
+ local filename = collapse_path(filejoin(cnfpaths[i],luacnfname))
+ if lfs.isfile(filename) then
+ specification[#specification+1] = filename
+ end
+ end
+ end
+end
+
+local function load_configuration_files()
+ local specification = instance.specification
+ if #specification > 0 then
+ local luacnfname = resolvers.luacnfname
+ for i=1,#specification do
+ local filename = specification[i]
+ local pathname = filedirname(filename)
+ local filename = filejoin(pathname,luacnfname)
+ local blob = loadfile(filename)
+ if blob then
+ local data = blob()
+ data = data and data.content
+ local setups = instance.setups
+ if data then
+ if trace_locating then
+ report_resolvers("loading configuration file '%s'",filename)
+ report_resolvers()
+ end
+ -- flattening is easier to deal with as we need to collapse the data later anyway
+ local t = { }
+ for k, v in next, data do -- v = progname
+ if v ~= unset_variable then
+ local kind = type(v)
+ if kind == "string" then
+ t[k] = v
+ elseif kind == "table" then
+ -- this operates on the table directly
+ setters.initialize(filename,k,v)
+ -- this doesn't (maybe metatables some day)
+ for kk, vv in next, v do -- vv = variable
+ if vv ~= unset_variable then
+ if type(vv) == "string" then
+ t[kk.."."..k] = vv
+ end
+ end
+ end
+ else
+ -- report_resolvers("strange key '%s' in configuration file '%s'",k,filename)
+ end
end
end
- if not find(line,"^[%%#]") then
- local l = gsub(line,"%s*%%.*$","")
- local k, v = match(l,"%s*(.-)%s*=%s*(.-)%s*$")
- if k and v and not data[k] then
- v = gsub(v,"[%%#].*",'')
- data[k] = gsub(v,"~","$HOME")
- instance.kpsevars[k] = true
+ setups[pathname] = t
+
+ if resolvers.luacnfstate == "default" then
+ -- the following code is not tested
+ local cnfspec = t["TEXMFCNF"]
+ if cnfspec then
+ -- we push the value into the main environment (osenv) so
+ -- that it takes precedence over the default one and therefore
+ -- also over following definitions
+ resolvers.setenv('TEXMFCNF',cnfspec)
+ -- we now identify and load the specified configuration files
+ instance.specification = { }
+ identify_configuration_files()
+ load_configuration_files()
+ -- we prevent further overload of the configuration variable
+ resolvers.luacnfstate = "configuration"
+ -- we quit the outer loop
+ break
end
end
+
else
- break
+ if trace_locating then
+ report_resolvers("skipping configuration file '%s'",filename)
+ end
+ setups[pathname] = { }
+ instance.loaderror = true
end
+ elseif trace_locating then
+ report_resolvers("skipping configuration file '%s'",filename)
+ end
+ instance.order[#instance.order+1] = instance.setups[pathname]
+ if instance.loaderror then
+ break
end
- f:close()
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'", fname)
end
+ elseif trace_locating then
+ report_resolvers("warning: no lua configuration files found")
end
end
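-- A minimal sketch, with made-up names, of the data shape the loader above
-- expects from a texmfcnf.lua file: the chunk returns a table whose 'content'
-- field maps variables to strings or to per-section subtables.
local example_texmfcnf = {
    content = {
        TEXMFCACHE = "$TMPDIR;$HOME",      -- plain string, stored as TEXMFCACHE
        context = {                        -- subtable, handed to setters.initialize
            TEXMFBOGUS = "somewhere/else", -- and flattened to TEXMFBOGUS.context
        },
    },
}
-- a TEXMFCNF entry in such a file would (once) redirect the search, as handled
-- by the luacnfstate logic above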
-local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared)
- local order = instance.order
+local function collapse_configuration_data() -- potential optimization: pass start index (setup and configuration are shared)
+ local order, variables, environment, origins = instance.order, instance.variables, instance.environment, instance.origins
for i=1,#order do
local c = order[i]
for k,v in next, c do
- if not instance.variables[k] then
- if instance.environment[k] then
- instance.variables[k] = instance.environment[k]
+ if variables[k] then
+ -- okay
+ else
+ local ek = environment[k]
+ if ek and ek ~= "" then
+ variables[k], origins[k] = ek, "env"
else
- instance.kpsevars[k] = true
- instance.variables[k] = resolvers.bare_variable(v)
+ local bv = checked_variable(v)
+ variables[k], origins[k] = bv, "cnf"
end
end
end
end
end
-function resolvers.load_cnf()
- local function loadoldconfigdata()
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- load_cnf_file(cnffiles[i])
- end
- end
- -- instance.cnffiles contain complete names now !
- -- we still use a funny mix of cnf and new but soon
- -- we will switch to lua exclusively as we only use
- -- the file to collect the tree roots
- if #instance.cnffiles == 0 then
- if trace_locating then
- logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)")
- end
- else
- local cnffiles = instance.cnffiles
- instance.rootpath = cnffiles[1]
- for k=1,#cnffiles do
- instance.cnffiles[k] = file.collapse_path(cnffiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- if instance.diskcache and not instance.renewcache then
- resolvers.loadoldconfig(instance.cnffiles)
- if instance.loaderror then
- loadoldconfigdata()
- resolvers.saveoldconfig()
- end
- else
- loadoldconfigdata()
- if instance.renewcache then
- resolvers.saveoldconfig()
- end
- end
- collapse_cnf_data()
- end
- check_configuration()
-end
-
-function resolvers.load_lua()
- if #instance.luafiles == 0 then
- -- yet harmless
- else
- instance.rootpath = instance.luafiles[1]
- local luafiles = instance.luafiles
- for k=1,#luafiles do
- instance.luafiles[k] = file.collapse_path(luafiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- resolvers.loadnewconfig()
- collapse_cnf_data()
- end
- check_configuration()
-end
-
-- database loading
-function resolvers.load_hash()
- resolvers.locatelists()
- if instance.diskcache and not instance.renewcache then
- resolvers.loadfiles()
- if instance.loaderror then
- resolvers.loadlists()
- resolvers.savefiles()
- end
- else
- resolvers.loadlists()
- if instance.renewcache then
- resolvers.savefiles()
- end
- end
-end
-
-function resolvers.append_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' appended",tag)
- end
- insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } )
-end
-
-function resolvers.prepend_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' prepended",tag)
- end
- insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } )
-end
-
-function resolvers.extend_texmf_var(specification) -- crap, we could better prepend the hash
--- local t = resolvers.expanded_path_list('TEXMF') -- full expansion
- local t = resolvers.split_path(resolvers.env('TEXMF'))
- insert(t,1,specification)
- local newspec = concat(t,";")
- if instance.environment["TEXMF"] then
- instance.environment["TEXMF"] = newspec
- elseif instance.variables["TEXMF"] then
- instance.variables["TEXMF"] = newspec
- else
- -- weird
- end
- resolvers.expand_variables()
- reset_hashes()
-end
-
-- locators
-function resolvers.locatelists()
- local texmfpaths = resolvers.clean_path_list('TEXMF')
- for i=1,#texmfpaths do
- local path = texmfpaths[i]
- if trace_locating then
- logs.report("fileio","locating list of '%s'",path)
- end
- resolvers.locatedatabase(file.collapse_path(path))
- end
-end
-
function resolvers.locatedatabase(specification)
return resolvers.methodhandler('locators', specification)
end
@@ -8882,11 +9832,11 @@ end
function resolvers.locators.tex(specification)
if specification and specification ~= '' and lfs.isdir(specification) then
if trace_locating then
- logs.report("fileio","tex locator '%s' found",specification)
+ report_resolvers("tex locator '%s' found",specification)
end
- resolvers.append_hash('file',specification,filename)
+ resolvers.append_hash('file',specification,filename,true) -- cache
elseif trace_locating then
- logs.report("fileio","tex locator '%s' not found",specification)
+ report_resolvers("tex locator '%s' not found",specification)
end
end
@@ -8896,9 +9846,8 @@ function resolvers.hashdatabase(tag,name)
return resolvers.methodhandler('hashers',tag,name)
end
-function resolvers.loadfiles()
- instance.loaderror = false
- instance.files = { }
+local function load_file_databases()
+ instance.loaderror, instance.files = false, { }
if not instance.renewcache then
local hashes = instance.hashes
for k=1,#hashes do
@@ -8909,194 +9858,134 @@ function resolvers.loadfiles()
end
end
-function resolvers.hashers.tex(tag,name)
- resolvers.load_data(tag,'files')
-end
-
--- generators:
-
-function resolvers.loadlists()
- local hashes = instance.hashes
- for i=1,#hashes do
- resolvers.generatedatabase(hashes[i].tag)
+function resolvers.hashers.tex(tag,name) -- used where?
+ local content = caches.loadcontent(tag,'files')
+ if content then
+ instance.files[tag] = content
+ else
+ instance.files[tag] = { }
+ instance.loaderror = true
end
end
-function resolvers.generatedatabase(specification)
- return resolvers.methodhandler('generators', specification)
-end
-
--- starting with . or .. etc or funny char
-
-local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-
---~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
---~ local l_confusing = lpeg.P(" ")
---~ local l_character = lpeg.patterns.utf8
---~ local l_dangerous = lpeg.P(".")
-
---~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1)
---~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false)
-
---~ local function test(str)
---~ print(str,lpeg.match(l_normal,str))
---~ end
---~ test("ヒラギノ明朝 Pro W3")
---~ test("..ヒラギノ明朝 Pro W3")
---~ test(":ヒラギノ明朝 Pro W3;")
---~ test("ヒラギノ明朝 /Pro W3;")
---~ test("ヒラギノ明朝 Pro W3")
-
-function resolvers.generators.tex(specification)
- local tag = specification
- if trace_locating then
- logs.report("fileio","scanning path '%s'",specification)
- end
- instance.files[tag] = { }
- local files = instance.files[tag]
- local n, m, r = 0, 0, 0
- local spec = specification .. '/'
- local attributes = lfs.attributes
- local directory = lfs.dir
- local function action(path)
- local full
- if path then
- full = spec .. path .. '/'
- else
- full = spec
- end
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- -- if lpegmatch(l_normal,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- if path then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
+local function locate_file_databases()
+ -- todo: cache:// and tree:// (runtime)
+ local texmfpaths = resolvers.expanded_path_list('TEXMF')
+ for i=1,#texmfpaths do
+ local path = collapse_path(texmfpaths[i])
+ local stripped = gsub(path,"^!!","")
+ local runtime = stripped == path
+ path = resolvers.clean_path(path)
+ if stripped ~= "" then
+ if lfs.isdir(path) then
+ local spec = resolvers.splitmethod(stripped)
+ if spec.scheme == "cache" then
+ stripped = spec.path
+ elseif runtime and (spec.noscheme or spec.scheme == "file") then
+ stripped = "tree:///" .. stripped
+ end
+ if trace_locating then
+ if runtime then
+ report_resolvers("locating list of '%s' (runtime)",path)
+ else
+ report_resolvers("locating list of '%s' (cached)",path)
end
- elseif mode == 'directory' then
- m = m + 1
- if path then
- action(path..'/'..name)
+ end
+ resolvers.locatedatabase(stripped) -- nothing done with result
+ else
+ if trace_locating then
+ if runtime then
+ report_resolvers("skipping list of '%s' (runtime)",path)
else
- action(name)
+ report_resolvers("skipping list of '%s' (cached)",path)
end
end
end
end
end
- action()
if trace_locating then
- logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r)
+ report_resolvers()
end
end
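-- Small illustration (made-up paths) of how the loop above classifies TEXMF
-- entries: a '!!' prefix means "rely on the cached file database", no prefix
-- means the tree is wrapped as tree:// and scanned at runtime.
local examples = { "!!/opt/tex/texmf-dist", "/home/user/texmf" }
for i=1,#examples do
    local path = examples[i]
    local stripped = gsub(path,"^!!","")
    print(path, stripped == path and "scanned at runtime (tree://)" or "cached database only")
end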
--- savers, todo
-
-function resolvers.savefiles()
- resolvers.save_data('files')
+local function generate_file_databases()
+ local hashes = instance.hashes
+ for i=1,#hashes do
+ resolvers.methodhandler('generators',hashes[i].tag)
+ end
+ if trace_locating then
+ report_resolvers()
+ end
end
--- A config (optionally) has the paths split in tables. Internally
--- we join them and split them after the expansion has taken place. This
--- is more convenient.
-
---~ local checkedsplit = string.checkedsplit
-
-local cache = { }
-
-local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;")))
-
-local function split_kpse_path(str) -- beware, this can be either a path or a {specification}
- local found = cache[str]
- if not found then
- if str == "" then
- found = { }
- else
- str = gsub(str,"\\","/")
---~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator)
-local split = lpegmatch(splitter,str)
- found = { }
- for i=1,#split do
- local s = split[i]
- if not find(s,"^{*unset}*") then
- found[#found+1] = s
- end
- end
- if trace_expansions then
- logs.report("fileio","splitting path specification '%s'",str)
- for k=1,#found do
- logs.report("fileio","% 4i: %s",k,found[k])
- end
- end
- cache[str] = found
+local function save_file_databases() -- will become cachers
+ for i=1,#instance.hashes do
+ local hash = instance.hashes[i]
+ local cachename = hash.tag
+ if hash.cache then
+ local content = instance.files[cachename]
+ caches.collapsecontent(content)
+ caches.savecontent(cachename,"files",content)
+ elseif trace_locating then
+ report_resolvers("not saving runtime tree '%s'",cachename)
end
end
- return found
end
-resolvers.split_kpse_path = split_kpse_path
-
-function resolvers.splitconfig()
- for i=1,#instance do
- local c = instance[i]
- for k,v in next, c do
- if type(v) == 'string' then
- local t = split_kpse_path(v)
- if #t > 1 then
- c[k] = t
- end
- end
+local function load_databases()
+ locate_file_databases()
+ if instance.diskcache and not instance.renewcache then
+ load_file_databases()
+ if instance.loaderror then
+ generate_file_databases()
+ save_file_databases()
+ end
+ else
+ generate_file_databases()
+ if instance.renewcache then
+ save_file_databases()
end
end
end
-function resolvers.joinconfig()
- local order = instance.order
- for i=1,#order do
- local c = order[i]
- for k,v in next, c do -- indexed?
- if type(v) == 'table' then
- c[k] = file.join_path(v)
- end
- end
+function resolvers.append_hash(type,tag,name,cache)
+ if trace_locating then
+ report_resolvers("hash '%s' appended",tag)
end
+ insert(instance.hashes, { type = type, tag = tag, name = name, cache = cache } )
end
-function resolvers.split_path(str)
- if type(str) == 'table' then
- return str
- else
- return split_kpse_path(str)
+function resolvers.prepend_hash(type,tag,name,cache)
+ if trace_locating then
+ report_resolvers("hash '%s' prepended",tag)
end
+ insert(instance.hashes, 1, { type = type, tag = tag, name = name, cache = cache } )
end
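-- Usage sketch (paths made up): the new fourth argument flags a hash as
-- cacheable; save_file_databases() only writes hashes registered with
-- cache == true, runtime trees are skipped.
resolvers.append_hash ('file',"/opt/tex/texmf-dist","texmf-dist",true) -- saved to the cache
resolvers.prepend_hash('file',"/home/user/texmf","texmf-home",false)   -- runtime only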
-function resolvers.join_path(str)
- if type(str) == 'table' then
- return file.join_path(str)
+function resolvers.extend_texmf_var(specification) -- not ideal, prepending the hash would be better
+-- local t = resolvers.expanded_path_list('TEXMF') -- full expansion
+ local t = resolvers.split_path(resolvers.getenv('TEXMF'))
+ insert(t,1,specification)
+ local newspec = concat(t,";")
+ if instance.environment["TEXMF"] then
+ instance.environment["TEXMF"] = newspec
+ elseif instance.variables["TEXMF"] then
+ instance.variables["TEXMF"] = newspec
else
- return str
+ -- weird
end
+ resolvers.expand_variables()
+ reset_hashes()
+end
+
+function resolvers.generators.tex(specification,tag)
+ instance.files[tag or specification] = resolvers.scan_files(specification)
end
function resolvers.splitexpansions()
local ie = instance.expansions
for k,v in next, ie do
- local t, h, p = { }, { }, split_kpse_path(v)
+ local t, h, p = { }, { }, split_configuration_path(v)
for kk=1,#p do
local vv = p[kk]
if vv ~= "" and not h[vv] then
@@ -9114,222 +10003,22 @@ end
-- end of split/join code
-function resolvers.saveoldconfig()
- resolvers.splitconfig()
- resolvers.save_data('configuration')
- resolvers.joinconfig()
-end
-
-resolvers.configbanner = [[
--- This is a Luatex configuration file created by 'luatools.lua' or
--- 'luatex.exe' directly. For comment, suggestions and questions you can
--- contact the ConTeXt Development Team. This configuration file is
--- not copyrighted. [HH & TH]
-]]
-
-function resolvers.serialize(files)
- -- This version is somewhat optimized for the kind of
- -- tables that we deal with, so it's much faster than
- -- the generic serializer. This makes sense because
- -- luatools and mtxtools are called frequently. Okay,
- -- we pay a small price for properly tabbed tables.
- local t = { }
- local function dump(k,v,m) -- could be moved inline
- if type(v) == 'string' then
- return m .. "['" .. k .. "']='" .. v .. "',"
- elseif #v == 1 then
- return m .. "['" .. k .. "']='" .. v[1] .. "',"
- else
- return m .. "['" .. k .. "']={'" .. concat(v,"','").. "'},"
- end
- end
- t[#t+1] = "return {"
- if instance.sortdata then
- local sortedfiles = sortedkeys(files)
- for i=1,#sortedfiles do
- local k = sortedfiles[i]
- local fk = files[k]
- if type(fk) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- local sortedfk = sortedkeys(fk)
- for j=1,#sortedfk do
- local kk = sortedfk[j]
- t[#t+1] = dump(kk,fk[kk],"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,fk,"\t")
- end
- end
- else
- for k, v in next, files do
- if type(v) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- for kk,vv in next, v do
- t[#t+1] = dump(kk,vv,"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,v,"\t")
- end
- end
- end
- t[#t+1] = "}"
- return concat(t,"\n")
-end
-
-local data_state = { }
+-- we used to have both 'files' and 'configurations', hence the following
+-- shared function
function resolvers.data_state()
- return data_state or { }
-end
-
-function resolvers.save_data(dataname, makename) -- untested without cache overload
- for cachename, files in next, instance[dataname] do
- local name = (makename or file.join)(cachename,dataname)
- local luaname, lucname = name .. ".lua", name .. ".luc"
- if trace_locating then
- logs.report("fileio","preparing '%s' for '%s'",dataname,cachename)
- end
- for k, v in next, files do
- if type(v) == "table" and #v == 1 then
- files[k] = v[1]
- end
- end
- local data = {
- type = dataname,
- root = cachename,
- version = resolvers.cacheversion,
- date = os.date("%Y-%m-%d"),
- time = os.date("%H:%M:%S"),
- content = files,
- uuid = os.uuid(),
- }
- local ok = io.savedata(luaname,resolvers.serialize(data))
- if ok then
- if trace_locating then
- logs.report("fileio","'%s' saved in '%s'",dataname,luaname)
- end
- if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip
- if trace_locating then
- logs.report("fileio","'%s' compiled to '%s'",dataname,lucname)
- end
- else
- if trace_locating then
- logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname)
- end
- os.remove(lucname)
- end
- elseif trace_locating then
- logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname)
- end
- end
-end
-
-function resolvers.load_data(pathname,dataname,filename,makename) -- untested without cache overload
- filename = ((not filename or (filename == "")) and dataname) or filename
- filename = (makename and makename(dataname,filename)) or file.join(pathname,filename)
- local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
- if blob then
- local data = blob()
- if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then
- data_state[#data_state+1] = data.uuid
- if trace_locating then
- logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = data.content
- else
- if trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
-end
-
--- some day i'll use the nested approach, but not yet (actually we even drop
--- engine/progname support since we have only luatex now)
---
--- first texmfcnf.lua files are located, next the cached texmf.cnf files
---
--- return {
--- TEXMFBOGUS = 'effe checken of dit werkt',
--- }
-
-function resolvers.resetconfig()
- identify_own()
- instance.configuration, instance.setup, instance.order, instance.loaderror = { }, { }, { }, false
-end
-
-function resolvers.loadnewconfig()
- local luafiles = instance.luafiles
- for i=1,#luafiles do
- local cnf = luafiles[i]
- local pathname = file.dirname(cnf)
- local filename = file.join(pathname,resolvers.luaname)
- local blob = loadfile(filename)
- if blob then
- local data = blob()
- if data then
- if trace_locating then
- logs.report("fileio","loading configuration file '%s'",filename)
- end
- if true then
- -- flatten to variable.progname
- local t = { }
- for k, v in next, data do -- v = progname
- if type(v) == "string" then
- t[k] = v
- else
- for kk, vv in next, v do -- vv = variable
- if type(vv) == "string" then
- t[vv.."."..v] = kk
- end
- end
- end
- end
- instance['setup'][pathname] = t
- else
- instance['setup'][pathname] = data
- end
- else
- if trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance['setup'][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance.order[#instance.order+1] = instance.setup[pathname]
- if instance.loaderror then break end
- end
-end
-
-function resolvers.loadoldconfig()
- if not instance.renewcache then
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- local cnf = cnffiles[i]
- local dname = file.dirname(cnf)
- resolvers.load_data(dname,'configuration')
- instance.order[#instance.order+1] = instance.configuration[dname]
- if instance.loaderror then break end
- end
- end
- resolvers.joinconfig()
+ return caches.contentstate()
end
function resolvers.expand_variables()
local expansions, environment, variables = { }, instance.environment, instance.variables
- local env = resolvers.env
+ local getenv = resolvers.getenv
instance.expansions = expansions
- if instance.engine ~= "" then environment['engine'] = instance.engine end
- if instance.progname ~= "" then environment['progname'] = instance.progname end
+ local engine, progname = instance.engine, instance.progname
+ if type(engine) ~= "string" then instance.engine, engine = "", "" end
+ if type(progname) ~= "string" then instance.progname, progname = "", "" end
+ if engine ~= "" then environment['engine'] = engine end
+ if progname ~= "" then environment['progname'] = progname end
for k,v in next, environment do
local a, b = match(k,"^(%a+)%_(.*)%s*$")
if a and b then
@@ -9338,7 +10027,7 @@ function resolvers.expand_variables()
expansions[k] = v
end
end
- for k,v in next, environment do -- move environment to expansions
+ for k,v in next, environment do -- move environment to expansions (variables are already in there)
if not expansions[k] then expansions[k] = v end
end
for k,v in next, variables do -- move variables to expansions
@@ -9347,7 +10036,7 @@ function resolvers.expand_variables()
local busy = false
local function resolve(a)
busy = true
- return expansions[a] or env(a)
+ return expansions[a] or getenv(a)
end
while true do
busy = false
@@ -9355,6 +10044,8 @@ function resolvers.expand_variables()
local s, n = gsub(v,"%$([%a%d%_%-]+)",resolve)
local s, m = gsub(s,"%$%{([%a%d%_%-]+)%}",resolve)
if n > 0 or m > 0 then
+ s = gsub(s,";+",";")
+ s = gsub(s,";[!{}/\\]+;",";")
expansions[k]= s
end
end
@@ -9391,63 +10082,59 @@ function resolvers.unexpanded_path(str)
return file.join_path(resolvers.unexpanded_path_list(str))
end
-do -- no longer needed
-
- local done = { }
+local done = { }
- function resolvers.reset_extra_path()
- local ep = instance.extra_paths
- if not ep then
- ep, done = { }, { }
- instance.extra_paths = ep
- elseif #ep > 0 then
- instance.lists, done = { }, { }
- end
+function resolvers.reset_extra_path()
+ local ep = instance.extra_paths
+ if not ep then
+ ep, done = { }, { }
+ instance.extra_paths = ep
+ elseif #ep > 0 then
+ instance.lists, done = { }, { }
end
+end
- function resolvers.register_extra_path(paths,subpaths)
- local ep = instance.extra_paths or { }
- local n = #ep
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = p .. "/" .. s
- if not done[ps] then
- ep[#ep+1] = resolvers.clean_path(ps)
- done[ps] = true
- end
- end
- end
- else
- for p in gmatch(paths,"[^,]+") do
- if not done[p] then
- ep[#ep+1] = resolvers.clean_path(p)
- done[p] = true
- end
- end
- end
- elseif subpaths and subpaths ~= "" then
- for i=1,n do
+function resolvers.register_extra_path(paths,subpaths)
+ local ep = instance.extra_paths or { }
+ local n = #ep
+ if paths and paths ~= "" then
+ if subpaths and subpaths ~= "" then
+ for p in gmatch(paths,"[^,]+") do
-- we gmatch each step again, not that fast, but used seldom
for s in gmatch(subpaths,"[^,]+") do
- local ps = ep[i] .. "/" .. s
+ local ps = p .. "/" .. s
if not done[ps] then
ep[#ep+1] = resolvers.clean_path(ps)
done[ps] = true
end
end
end
+ else
+ for p in gmatch(paths,"[^,]+") do
+ if not done[p] then
+ ep[#ep+1] = resolvers.clean_path(p)
+ done[p] = true
+ end
+ end
end
- if #ep > 0 then
- instance.extra_paths = ep -- register paths
- end
- if #ep > n then
- instance.lists = { } -- erase the cache
+ elseif subpaths and subpaths ~= "" then
+ for i=1,n do
+ -- we gmatch each step again, not that fast, but used seldom
+ for s in gmatch(subpaths,"[^,]+") do
+ local ps = ep[i] .. "/" .. s
+ if not done[ps] then
+ ep[#ep+1] = resolvers.clean_path(ps)
+ done[ps] = true
+ end
+ end
end
end
-
+ if #ep > 0 then
+ instance.extra_paths = ep -- register paths
+ end
+ if #ep > n then
+ instance.lists = { } -- erase the cache
+ end
end
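-- Usage sketch (made-up paths): both arguments are comma separated lists and
-- every path is combined with every subpath, so this call registers
-- /data/project/tex, /data/project/doc, /data/shared/tex and /data/shared/doc.
resolvers.register_extra_path("/data/project,/data/shared","tex,doc")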
local function made_list(instance,list)
@@ -9492,7 +10179,7 @@ function resolvers.clean_path_list(str)
local t = resolvers.expanded_path_list(str)
if t then
for i=1,#t do
- t[i] = file.collapse_path(resolvers.clean_path(t[i]))
+ t[i] = collapse_path(resolvers.clean_path(t[i]))
end
end
return t
@@ -9532,33 +10219,6 @@ function resolvers.expand_path_from_var(str)
return file.join_path(resolvers.expanded_path_list_from_var(str))
end
-function resolvers.format_of_var(str)
- return formats[str] or formats[alternatives[str]] or ''
-end
-function resolvers.format_of_suffix(str)
- return suffixmap[file.extname(str)] or 'tex'
-end
-
-function resolvers.variable_of_format(str)
- return formats[str] or formats[alternatives[str]] or ''
-end
-
-function resolvers.var_of_format_or_suffix(str)
- local v = formats[str]
- if v then
- return v
- end
- v = formats[alternatives[str]]
- if v then
- return v
- end
- v = suffixmap[file.extname(str)]
- if v then
- return formats[isf]
- end
- return ''
-end
-
function resolvers.expand_braces(str) -- output variable and brace expansion of STRING
local ori = resolvers.variable(str)
local pth = expanded_path_from_list(resolvers.split_path(ori))
@@ -9571,9 +10231,9 @@ function resolvers.isreadable.file(name)
local readable = lfs.isfile(name) -- brrr
if trace_detail then
if readable then
- logs.report("fileio","file '%s' is readable",name)
+ report_resolvers("file '%s' is readable",name)
else
- logs.report("fileio","file '%s' is not readable", name)
+ report_resolvers("file '%s' is not readable", name)
end
end
return readable
@@ -9589,10 +10249,10 @@ local function collect_files(names)
for k=1,#names do
local fname = names[k]
if trace_detail then
- logs.report("fileio","checking name '%s'",fname)
+ report_resolvers("checking name '%s'",fname)
end
- local bname = file.basename(fname)
- local dname = file.dirname(fname)
+ local bname = filebasename(fname)
+ local dname = filedirname(fname)
if dname == "" or find(dname,"^%.") then
dname = false
else
@@ -9605,7 +10265,7 @@ local function collect_files(names)
local files = blobpath and instance.files[blobpath]
if files then
if trace_detail then
- logs.report("fileio","deep checking '%s' (%s)",blobpath,bname)
+ report_resolvers("deep checking '%s' (%s)",blobpath,bname)
end
local blobfile = files[bname]
if not blobfile then
@@ -9617,53 +10277,38 @@ local function collect_files(names)
end
end
if blobfile then
+ local blobroot = files.__path__ or blobpath
if type(blobfile) == 'string' then
if not dname or find(blobfile,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,blobfile,bname), -- search
- resolvers.concatinators[hash.type](blobpath,blobfile,bname) -- result
- }
+ local kind = hash.type
+ local search = filejoin(blobpath,blobfile,bname)
+ local result = resolvers.concatinators[hash.type](blobroot,blobfile,bname)
+ if trace_detail then
+ report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
+ end
+ filelist[#filelist+1] = { kind, search, result }
end
else
for kk=1,#blobfile do
local vv = blobfile[kk]
if not dname or find(vv,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,vv,bname), -- search
- resolvers.concatinators[hash.type](blobpath,vv,bname) -- result
- }
+ local kind = hash.type
+ local search = filejoin(blobpath,vv,bname)
+ local result = resolvers.concatinators[hash.type](blobroot,vv,bname)
+ if trace_detail then
+ report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
+ end
+ filelist[#filelist+1] = { kind, search, result }
end
end
end
end
elseif trace_locating then
- logs.report("fileio","no match in '%s' (%s)",blobpath,bname)
+ report_resolvers("no match in '%s' (%s)",blobpath,bname)
end
end
end
- if #filelist > 0 then
- return filelist
- else
- return nil
- end
-end
-
-function resolvers.suffix_of_format(str)
- if suffixes[str] then
- return suffixes[str][1]
- else
- return ""
- end
-end
-
-function resolvers.suffixes_of_format(str)
- if suffixes[str] then
- return suffixes[str]
- else
- return {}
- end
+ return #filelist > 0 and filelist or nil
end
function resolvers.register_in_trees(name)
@@ -9683,27 +10328,28 @@ local function can_be_dir(name) -- can become local
fakepaths[name] = 2 -- no directory
end
end
- return (fakepaths[name] == 1)
+ return fakepaths[name] == 1
end
local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc)
local result = collected or { }
local stamp = nil
- filename = file.collapse_path(filename)
+ filename = collapse_path(filename)
-- speed up / beware: format problem
if instance.remember then
stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format
if instance.found[stamp] then
if trace_locating then
- logs.report("fileio","remembering file '%s'",filename)
+ report_resolvers("remembering file '%s'",filename)
end
+ resolvers.register_in_trees(filename) -- for tracing used files
return instance.found[stamp]
end
end
if not dangerous[instance.format or "?"] then
if resolvers.isreadable.file(filename) then
if trace_detail then
- logs.report("fileio","file '%s' found directly",filename)
+ report_resolvers("file '%s' found directly",filename)
end
instance.found[stamp] = { filename }
return { filename }
@@ -9711,36 +10357,39 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
end
if find(filename,'%*') then
if trace_locating then
- logs.report("fileio","checking wildcard '%s'", filename)
+ report_resolvers("checking wildcard '%s'", filename)
end
result = resolvers.find_wildcard_files(filename)
elseif file.is_qualified_path(filename) then
if resolvers.isreadable.file(filename) then
if trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
+ report_resolvers("qualified name '%s'", filename)
end
result = { filename }
else
- local forcedname, ok, suffix = "", false, file.extname(filename)
+ local forcedname, ok, suffix = "", false, fileextname(filename)
if suffix == "" then -- why
if instance.format == "" then
forcedname = filename .. ".tex"
if resolvers.isreadable.file(forcedname) then
if trace_locating then
- logs.report("fileio","no suffix, forcing standard filetype 'tex'")
+ report_resolvers("no suffix, forcing standard filetype 'tex'")
end
result, ok = { forcedname }, true
end
else
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
- forcedname = filename .. "." .. s
- if resolvers.isreadable.file(forcedname) then
- if trace_locating then
- logs.report("fileio","no suffix, forcing format filetype '%s'", s)
+ local format_suffixes = suffixes[instance.format]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if resolvers.isreadable.file(forcedname) then
+ if trace_locating then
+ report_resolvers("no suffix, forcing format filetype '%s'", s)
+ end
+ result, ok = { forcedname }, true
+ break
end
- result, ok = { forcedname }, true
- break
end
end
end
@@ -9748,7 +10397,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if not ok and suffix ~= "" then
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
- local basename = file.basename(filename)
+ local basename = filebasename(filename)
local pattern = gsub(filename .. "$","([%.%-])","%%%1")
local savedformat = instance.format
local format = savedformat or ""
@@ -9789,12 +10438,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
-- end
end
if not ok and trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
+ report_resolvers("qualified name '%s'", filename)
end
end
else
-- search spec
- local filetype, extra, done, wantedfiles, ext = '', nil, false, { }, file.extname(filename)
+ local filetype, extra, done, wantedfiles, ext = '', nil, false, { }, fileextname(filename)
+ -- tricky as filename can be bla.1.2.3
if ext == "" then
if not instance.force_suffixes then
wantedfiles[#wantedfiles+1] = filename
@@ -9803,29 +10453,31 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
wantedfiles[#wantedfiles+1] = filename
end
if instance.format == "" then
- if ext == "" then
+ if ext == "" or not suffixmap[ext] then
local forcedname = filename .. '.tex'
wantedfiles[#wantedfiles+1] = forcedname
filetype = resolvers.format_of_suffix(forcedname)
if trace_locating then
- logs.report("fileio","forcing filetype '%s'",filetype)
+ report_resolvers("forcing filetype '%s'",filetype)
end
else
filetype = resolvers.format_of_suffix(filename)
if trace_locating then
- logs.report("fileio","using suffix based filetype '%s'",filetype)
+ report_resolvers("using suffix based filetype '%s'",filetype)
end
end
else
- if ext == "" then
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. s
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[instance.format]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
end
filetype = instance.format
if trace_locating then
- logs.report("fileio","using given filetype '%s'",filetype)
+ report_resolvers("using given filetype '%s'",filetype)
end
end
local typespec = resolvers.variable_of_format(filetype)
@@ -9833,13 +10485,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if not pathlist or #pathlist == 0 then
-- no pathlist, access check only / todo == wildcard
if trace_detail then
- logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ report_resolvers("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
end
for k=1,#wantedfiles do
local fname = wantedfiles[k]
if fname and resolvers.isreadable.file(fname) then
filename, done = fname, true
- result[#result+1] = file.join('.',fname)
+ result[#result+1] = filejoin('.',fname)
break
end
end
@@ -9857,11 +10509,11 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
local dirlist = { }
if filelist then
for i=1,#filelist do
- dirlist[i] = file.dirname(filelist[i][2]) .. "/"
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
end
if trace_detail then
- logs.report("fileio","checking filename '%s'",filename)
+ report_resolvers("checking filename '%s'",filename)
end
-- a bit messy ... esp the doscan setting here
local doscan
@@ -9884,7 +10536,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
expression = "^" .. expression .. "$"
if trace_detail then
- logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname)
+ report_resolvers("using pattern '%s' for path '%s'",expression,pathname)
end
for k=1,#filelist do
local fl = filelist[k]
@@ -9893,20 +10545,19 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if find(d,expression) then
--- todo, test for readable
result[#result+1] = fl[3]
- resolvers.register_in_trees(f) -- for tracing used files
done = true
if instance.allresults then
if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d)
+ report_resolvers("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
else
if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d)
+ report_resolvers("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
end
break
end
elseif trace_detail then
- logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d)
+ report_resolvers("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
end
@@ -9919,10 +10570,10 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if can_be_dir(ppname) then
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local fname = file.join(ppname,w)
+ local fname = filejoin(ppname,w)
if resolvers.isreadable.file(fname) then
if trace_detail then
- logs.report("fileio","found '%s' by scanning",fname)
+ report_resolvers("found '%s' by scanning",fname)
end
result[#result+1] = fname
done = true
@@ -9936,14 +10587,16 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
end
end
if not done and doscan then
- -- todo: slow path scanning
+ -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
end
if done and not instance.allresults then break end
end
end
end
for k=1,#result do
- result[k] = file.collapse_path(result[k])
+ local rk = collapse_path(result[k])
+ result[k] = rk
+ resolvers.register_in_trees(rk) -- for tracing used files
end
if instance.remember then
instance.found[stamp] = result
@@ -9953,7 +10606,7 @@ end
if not resolvers.concatinators then resolvers.concatinators = { } end
-resolvers.concatinators.tex = file.join
+resolvers.concatinators.tex = filejoin
resolvers.concatinators.file = resolvers.concatinators.tex
function resolvers.find_files(filename,filetype,mustexist)
@@ -9980,8 +10633,14 @@ function resolvers.find_file(filename,filetype,mustexist)
return (resolvers.find_files(filename,filetype,mustexist)[1] or "")
end
+function resolvers.find_path(filename,filetype)
+ local path = resolvers.find_files(filename,filetype)[1] or ""
+ -- todo return current path
+ return file.dirname(path)
+end
+
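-- Usage sketch: find_path() returns only the directory part of the first match
-- (the file name below is just an example).
local cnfdir = resolvers.find_path("texmf.cnf")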
function resolvers.find_given_files(filename)
- local bname, result = file.basename(filename), { }
+ local bname, result = filebasename(filename), { }
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
@@ -10038,9 +10697,9 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
return done
end
-function resolvers.find_wildcard_files(filename) -- todo: remap:
+function resolvers.find_wildcard_files(filename) -- todo: remap: and lpeg
local result = { }
- local bname, dname = file.basename(filename), file.dirname(filename)
+ local bname, dname = filebasename(filename), filedirname(filename)
local path = gsub(dname,"^*/","")
path = gsub(path,"*",".*")
path = gsub(path,"-","%%-")
@@ -10093,24 +10752,24 @@ end
function resolvers.load(option)
statistics.starttiming(instance)
- resolvers.resetconfig()
- resolvers.identify_cnf()
- resolvers.load_lua() -- will become the new method
- resolvers.expand_variables()
- resolvers.load_cnf() -- will be skipped when we have a lua file
+ identify_configuration_files()
+ load_configuration_files()
+ collapse_configuration_data()
resolvers.expand_variables()
if option ~= "nofiles" then
- resolvers.load_hash()
+ load_databases()
resolvers.automount()
end
statistics.stoptiming(instance)
+ local files = instance.files
+ return files and next(files) and true
end
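-- Usage sketch: resolvers.load() now returns a truth value, so a caller can
-- check whether any file database was found before resolving names.
if not resolvers.load() then
    print("resolvers: no file databases found, check TEXMF and TEXMFCNF")
end
local name = resolvers.find_file("context.mkiv")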
function resolvers.for_files(command, files, filetype, mustexist)
if files and #files > 0 then
local function report(str)
if trace_locating then
- logs.report("fileio",str) -- has already verbose
+ report_resolvers(str) -- has already verbose
else
print(str)
end
@@ -10158,51 +10817,6 @@ function resolvers.register_file(files, name, path)
end
end
-function resolvers.splitmethod(filename)
- if not filename then
- return { } -- safeguard
- elseif type(filename) == "table" then
- return filename -- already split
- elseif not find(filename,"://") then
- return { scheme="file", path = filename, original=filename } -- quick hack
- else
- return url.hashed(filename)
- end
-end
-
-function table.sequenced(t,sep) -- temp here
- local s = { }
- for k, v in next, t do -- indexed?
- s[#s+1] = k .. "=" .. tostring(v)
- end
- return concat(s, sep or " | ")
-end
-
-function resolvers.methodhandler(what, filename, filetype) -- ...
- filename = file.collapse_path(filename)
- local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
- local scheme = specification.scheme
- if resolvers[what][scheme] then
- if trace_locating then
- logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
- end
- return resolvers[what][scheme](filename,filetype) -- todo: specification
- else
- return resolvers[what].tex(filename,filetype) -- todo: specification
- end
-end
-
-function resolvers.clean_path(str)
- if str then
- str = gsub(str,"\\","/")
- str = gsub(str,"^!+","")
- str = gsub(str,"^~",resolvers.homedir)
- return str
- else
- return nil
- end
-end
-
function resolvers.do_with_path(name,func)
local pathlist = resolvers.expanded_path_list(name)
for i=1,#pathlist do
@@ -10214,45 +10828,13 @@ function resolvers.do_with_var(name,func)
func(expanded_var(name))
end
-function resolvers.with_files(pattern,handle)
- local hashes = instance.hashes
- for i=1,#hashes do
- local hash = hashes[i]
- local blobpath = hash.tag
- local blobtype = hash.type
- if blobpath then
- local files = instance.files[blobpath]
- if files then
- for k,v in next, files do
- if find(k,"^remap:") then
- k = files[k]
- v = files[k] -- chained
- end
- if find(k,pattern) then
- if type(v) == "string" then
- handle(blobtype,blobpath,v,k)
- else
- for _,vv in next, v do -- indexed
- handle(blobtype,blobpath,vv,k)
- end
- end
- end
- end
- end
- end
- end
-end
-
function resolvers.locate_format(name)
- local barename, fmtname = gsub(name,"%.%a+$",""), ""
- if resolvers.usecache then
- local path = file.join(caches.setpath("formats")) -- maybe platform
- fmtname = file.join(path,barename..".fmt") or ""
- end
+ local barename = gsub(name,"%.%a+$","")
+ local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
if fmtname == "" then
fmtname = resolvers.find_files(barename..".fmt")[1] or ""
+ fmtname = resolvers.clean_path(fmtname)
end
- fmtname = resolvers.clean_path(fmtname)
if fmtname ~= "" then
local barename = file.removesuffix(fmtname)
local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
@@ -10277,196 +10859,48 @@ function resolvers.boolean_variable(str,default)
end
end
-texconfig.kpse_init = false
-
-kpse = { original = kpse } setmetatable(kpse, { __index = function(k,v) return resolvers[v] end } )
-
--- for a while
-
-input = resolvers
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-tmp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>This module deals with caching data. It sets up the paths and
-implements loaders and savers for tables. Best is to set the
-following variable. When not set, the usual paths will be
-checked. Personally I prefer the (users) temporary path.</p>
-
-</code>
-TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
-</code>
-
-<p>Currently we do no locking when we write files. This is no real
-problem because most caching involves fonts and the chance of them
-being written at the same time is small. We also need to extend
-luatools with a recache feature.</p>
---ldx]]--
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet
-
-caches = caches or { }
-
-caches.path = caches.path or nil
-caches.base = caches.base or "luatex-cache"
-caches.more = caches.more or "context"
-caches.direct = false -- true is faster but may need huge amounts of memory
-caches.tree = false
-caches.paths = caches.paths or nil
-caches.force = false
-caches.defaults = { "TEXMFCACHE", "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
-
-function caches.temp()
- local cachepath = nil
- local function check(list,isenv)
- if not cachepath then
- for k=1,#list do
- local v = list[k]
- cachepath = (isenv and (os.env[v] or "")) or v or ""
- if cachepath == "" then
- -- next
- else
- cachepath = resolvers.clean_path(cachepath)
- if lfs.isdir(cachepath) and file.iswritable(cachepath) then -- lfs.attributes(cachepath,"mode") == "directory"
- break
- elseif caches.force or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
- dir.mkdirs(cachepath)
- if lfs.isdir(cachepath) and file.iswritable(cachepath) then
- break
+function resolvers.with_files(pattern,handle,before,after) -- can be a nice iterator instead
+ local instance = resolvers.instance
+ local hashes = instance.hashes
+ for i=1,#hashes do
+ local hash = hashes[i]
+ local blobtype = hash.type
+ local blobpath = hash.tag
+ if blobpath then
+ if before then
+ before(blobtype,blobpath,pattern)
+ end
+ local files = instance.files[blobpath]
+ local total, checked, done = 0, 0, 0
+ if files then
+ for k,v in next, files do
+ total = total + 1
+ if find(k,"^remap:") then
+ k = files[k]
+ v = k -- files[k] -- chained
+ end
+ if find(k,pattern) then
+ if type(v) == "string" then
+ checked = checked + 1
+ if handle(blobtype,blobpath,v,k) then
+ done = done + 1
+ end
+ else
+ checked = checked + #v
+ for i=1,#v do
+ if handle(blobtype,blobpath,v[i],k) then
+ done = done + 1
+ end
+ end
end
end
end
- cachepath = nil
+ end
+ if after then
+ after(blobtype,blobpath,pattern,total,checked,done)
end
end
end
- check(resolvers.clean_path_list("TEXMFCACHE") or { })
- check(caches.defaults,true)
- if not cachepath then
- print("\nfatal error: there is no valid (writable) cache path defined\n")
- os.exit()
- elseif not lfs.isdir(cachepath) then -- lfs.attributes(cachepath,"mode") ~= "directory"
- print(format("\nfatal error: cache path %s is not a directory\n",cachepath))
- os.exit()
- end
- cachepath = file.collapse_path(cachepath)
- function caches.temp()
- return cachepath
- end
- return cachepath
-end
-
-function caches.configpath()
- return table.concat(resolvers.instance.cnffiles,";")
-end
-
-function caches.hashed(tree)
- return md5.hex(gsub(lower(tree),"[\\\/]+","/"))
-end
-
-function caches.treehash()
- local tree = caches.configpath()
- if not tree or tree == "" then
- return false
- else
- return caches.hashed(tree)
- end
-end
-
-function caches.setpath(...)
- if not caches.path then
- if not caches.path then
- caches.path = caches.temp()
- end
- caches.path = resolvers.clean_path(caches.path) -- to be sure
- caches.tree = caches.tree or caches.treehash()
- if caches.tree then
- caches.path = dir.mkdirs(caches.path,caches.base,caches.more,caches.tree)
- else
- caches.path = dir.mkdirs(caches.path,caches.base,caches.more)
- end
- end
- if not caches.path then
- caches.path = '.'
- end
- caches.path = resolvers.clean_path(caches.path)
- local dirs = { ... }
- if #dirs > 0 then
- local pth = dir.mkdirs(caches.path,...)
- return pth
- end
- caches.path = dir.expand_name(caches.path)
- return caches.path
-end
-
-function caches.definepath(category,subcategory)
- return function()
- return caches.setpath(category,subcategory)
- end
-end
-
-function caches.setluanames(path,name)
- return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
-end
-
-function caches.loaddata(path,name)
- local tmaname, tmcname = caches.setluanames(path,name)
- local loader = loadfile(tmcname) or loadfile(tmaname)
- if loader then
- loader = loader()
- collectgarbage("step")
- return loader
- else
- return false
- end
-end
-
---~ function caches.loaddata(path,name)
---~ local tmaname, tmcname = caches.setluanames(path,name)
---~ return dofile(tmcname) or dofile(tmaname)
---~ end
-
-function caches.iswritable(filepath,filename)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- return file.iswritable(tmaname)
-end
-
-function caches.savedata(filepath,filename,data,raw)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- local reduce, simplify = true, true
- if raw then
- reduce, simplify = false, false
- end
- data.cache_uuid = os.uuid()
- if caches.direct then
- file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex
- else
- table.tofile(tmaname, data,'return',false,true,false) -- maybe not the last true
- end
- local cleanup = resolvers.boolean_variable("PURGECACHE", false)
- local strip = resolvers.boolean_variable("LUACSTRIP", true)
- utils.lua.compile(tmaname, tmcname, cleanup, strip)
-end
-
--- here we use the cache for format loading (texconfig.[formatname|jobname])
-
---~ if tex and texconfig and texconfig.formatname and texconfig.formatname == "" then
-if tex and texconfig and (not texconfig.formatname or texconfig.formatname == "") and input and resolvers.instance then
- if not texconfig.luaname then texconfig.luaname = "cont-en.lua" end -- or luc
- texconfig.formatname = caches.setpath("formats") .. "/" .. gsub(texconfig.luaname,"%.lu.$",".fmt")
end
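-- Usage sketch: collect every file matching a pattern; 'handle' receives the
-- hash type, the tree root, the stored subpath and the file name and should
-- return true for entries it accepts, 'before' and 'after' are optional
-- progress callbacks (the pattern below is just an example).
local hits = { }
resolvers.with_files("%.mkiv$",
    function(blobtype,blobpath,subpath,name)
        hits[#hits+1] = blobpath .. "/" .. subpath .. "/" .. name
        return true
    end,
    nil, -- no 'before' callback
    function(blobtype,blobpath,pattern,total,checked,done)
        print(string.format("%s: %s entries, %s checked, %s accepted",blobpath,total,checked,done))
    end
)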
@@ -10474,7 +10908,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-res'] = {
+if not modules then modules = { } end modules ['data-pre'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -10482,14 +10916,15 @@ if not modules then modules = { } end modules ['data-res'] = {
license = "see context related readme files"
}
---~ print(resolvers.resolve("abc env:tmp file:cont-en.tex path:cont-en.tex full:cont-en.tex rel:zapf/one/p-chars.tex"))
local upper, lower, gsub = string.upper, string.lower, string.gsub
local prefixes = { }
-prefixes.environment = function(str)
- return resolvers.clean_path(os.getenv(str) or os.getenv(upper(str)) or os.getenv(lower(str)) or "")
+local getenv = resolvers.getenv
+
+prefixes.environment = function(str) -- getenv is case insensitive anyway
+ return resolvers.clean_path(getenv(str) or getenv(upper(str)) or getenv(lower(str)) or "")
end
prefixes.relative = function(str,n)
@@ -10627,7 +11062,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-con'] = {
- version = 1.001,
+ version = 1.100,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
@@ -10657,46 +11092,58 @@ containers = containers or { }
containers.usecache = true
+local report_cache = logs.new("cache")
+
local function report(container,tag,name)
if trace_cache or trace_containers then
- logs.report(format("%s cache",container.subcategory),"%s: %s",tag,name or 'invalid')
+ report_cache("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
end
end
local allocated = { }
--- tracing
+local mt = {
+ __index = function(t,k)
+ if k == "writable" then
+ local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable = writable
+ return writable
+ elseif k == "readables" then
+ local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables = readables
+ return readables
+ end
+ end
+}
function containers.define(category, subcategory, version, enabled)
- return function()
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or 1.000,
- trace = false,
- path = caches and caches.setpath and caches.setpath(category,subcategory),
- }
- c[subcategory] = s
- end
- return s
- else
- return nil
+ if category and subcategory then
+ local c = allocated[category]
+ if not c then
+ c = { }
+ allocated[category] = c
+ end
+ local s = c[subcategory]
+ if not s then
+ s = {
+ category = category,
+ subcategory = subcategory,
+ storage = { },
+ enabled = enabled,
+ version = version or math.pi, -- after all, this is TeX
+ trace = false,
+ -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
+ -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
+ }
+ setmetatable(s,mt)
+ c[subcategory] = s
end
+ return s
end
end
function containers.is_usable(container, name)
- return container.enabled and caches and caches.iswritable(container.path, name)
+ return container.enabled and caches and caches.iswritable(container.writable, name)
end
function containers.is_valid(container, name)
@@ -10709,18 +11156,20 @@ function containers.is_valid(container, name)
end
function containers.read(container,name)
- if container.enabled and caches and not container.storage[name] and containers.usecache then
- container.storage[name] = caches.loaddata(container.path,name)
- if containers.is_valid(container,name) then
+ local storage = container.storage
+ local stored = storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored = caches.loaddata(container.readables,name)
+ if stored and stored.cache_version == container.version then
report(container,"loaded",name)
else
- container.storage[name] = nil
+ stored = nil
end
- end
- if container.storage[name] then
+ storage[name] = stored
+ elseif stored then
report(container,"reusing",name)
end
- return container.storage[name]
+ return stored
end
function containers.write(container, name, data)
@@ -10729,7 +11178,7 @@ function containers.write(container, name, data)
if container.enabled and caches then
local unique, shared = data.unique, data.shared
data.unique, data.shared = nil, nil
- caches.savedata(container.path, name, data)
+ caches.savedata(container.writable, name, data)
report(container,"saved",name)
data.unique, data.shared = unique, shared
end
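-- Usage sketch (made-up category and name): define a container once, then read
-- and write cached tables; the 'writable' and 'readables' paths are resolved
-- lazily through the metatable above.
local democache = containers.define("fonts","demo",1.001,true)
local data = containers.read(democache,"somefont")
if not data then
    data = { glyphs = { } }                -- the expensive computation goes here
    containers.write(democache,"somefont",data)
end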
@@ -10764,41 +11213,7 @@ local format, lower, gsub, find = string.format, string.lower, string.gsub, stri
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
--- since we want to use the cache instead of the tree, we will now
--- reimplement the saver.
-
-local save_data = resolvers.save_data
-local load_data = resolvers.load_data
-
-resolvers.cachepath = nil -- public, for tracing
-resolvers.usecache = true -- public, for tracing
-
-function resolvers.save_data(dataname)
- save_data(dataname, function(cachename,dataname)
- resolvers.usecache = not toboolean(resolvers.expansion("CACHEINTDS") or "false",true)
- if resolvers.usecache then
- resolvers.cachepath = resolvers.cachepath or caches.definepath("trees")
- return file.join(resolvers.cachepath(),caches.hashed(cachename))
- else
- return file.join(cachename,dataname)
- end
- end)
-end
-
-function resolvers.load_data(pathname,dataname,filename)
- load_data(pathname,dataname,filename,function(dataname,filename)
- resolvers.usecache = not toboolean(resolvers.expansion("CACHEINTDS") or "false",true)
- if resolvers.usecache then
- resolvers.cachepath = resolvers.cachepath or caches.definepath("trees")
- return file.join(resolvers.cachepath(),caches.hashed(pathname))
- else
- if not filename or (filename == "") then
- filename = dataname
- end
- return file.join(pathname,filename)
- end
- end)
-end
+local report_resolvers = logs.new("resolvers")
-- we will make a better format, maybe something xml or just text or lua
@@ -10807,7 +11222,7 @@ resolvers.automounted = resolvers.automounted or { }
function resolvers.automount(usecache)
local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT'))
if (not mountpaths or #mountpaths == 0) and usecache then
- mountpaths = { caches.setpath("mount") }
+ mountpaths = caches.getreadablepaths("mount")
end
if mountpaths and #mountpaths > 0 then
statistics.starttiming(resolvers.instance)
@@ -10821,7 +11236,7 @@ function resolvers.automount(usecache)
-- skip
elseif find(line,"^zip://") then
if trace_locating then
- logs.report("fileio","mounting %s",line)
+ report_resolvers("mounting %s",line)
end
table.insert(resolvers.automounted,line)
resolvers.usezipfile(line)
@@ -10837,8 +11252,8 @@ end
-- status info
-statistics.register("used config path", function() return caches.configpath() end)
-statistics.register("used cache path", function() return caches.temp() or "?" end)
+statistics.register("used config file", function() return caches.configfiles() end)
+statistics.register("used cache path", function() return caches.usedpaths() end)
-- experiment (code will move)
@@ -10866,11 +11281,11 @@ function statistics.check_fmt_status(texname)
local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
local luvbanner = luv.enginebanner or "?"
if luvbanner ~= enginebanner then
- return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
+ return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
end
local luvhash = luv.sourcehash or "?"
if luvhash ~= sourcehash then
- return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
+ return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
end
else
return "invalid status file"
@@ -10900,6 +11315,8 @@ local unpack = unpack or table.unpack
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
-- zip:///oeps.zip?name=bla/bla.tex
-- zip:///oeps.zip?tree=tex/texmf-local
-- zip:///texmf.zip?tree=/tex/texmf
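-- a minimal sketch of how such a specification decomposes (hypothetical archive
-- and file names; query handling as used in the finders and openers below):
--
--   zip:///texmf.zip?name=tex/texmf/oeps.tex
--
--   specification.path -> "/texmf.zip"          (the archive to open)
--   q.name             -> "tex/texmf/oeps.tex"  (looked up inside that archive)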
@@ -10950,16 +11367,16 @@ function locators.zip(specification) -- where is this used? startup zips (untest
local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree
if trace_locating then
if zfile then
- logs.report("fileio","zip locator, archive '%s' found",specification.original)
+ report_resolvers("zip locator, archive '%s' found",specification.original)
else
- logs.report("fileio","zip locator, archive '%s' not found",specification.original)
+ report_resolvers("zip locator, archive '%s' not found",specification.original)
end
end
end
function hashers.zip(tag,name)
if trace_locating then
- logs.report("fileio","loading zip file '%s' as '%s'",name,tag)
+ report_resolvers("loading zip file '%s' as '%s'",name,tag)
end
resolvers.usezipfile(format("%s?tree=%s",tag,name))
end
@@ -10984,25 +11401,25 @@ function finders.zip(specification,filetype)
local zfile = zip.openarchive(specification.path)
if zfile then
if trace_locating then
- logs.report("fileio","zip finder, archive '%s' found",specification.path)
+ report_resolvers("zip finder, archive '%s' found",specification.path)
end
local dfile = zfile:open(q.name)
if dfile then
dfile = zfile:close()
if trace_locating then
- logs.report("fileio","zip finder, file '%s' found",q.name)
+ report_resolvers("zip finder, file '%s' found",q.name)
end
return specification.original
elseif trace_locating then
- logs.report("fileio","zip finder, file '%s' not found",q.name)
+ report_resolvers("zip finder, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio","zip finder, unknown archive '%s'",specification.path)
+ report_resolvers("zip finder, unknown archive '%s'",specification.path)
end
end
end
if trace_locating then
- logs.report("fileio","zip finder, '%s' not found",filename)
+ report_resolvers("zip finder, '%s' not found",filename)
end
return unpack(finders.notfound)
end
@@ -11015,25 +11432,25 @@ function openers.zip(specification)
local zfile = zip.openarchive(zipspecification.path)
if zfile then
if trace_locating then
- logs.report("fileio","zip opener, archive '%s' opened",zipspecification.path)
+ report_resolvers("zip opener, archive '%s' opened",zipspecification.path)
end
local dfile = zfile:open(q.name)
if dfile then
logs.show_open(specification)
if trace_locating then
- logs.report("fileio","zip opener, file '%s' found",q.name)
+ report_resolvers("zip opener, file '%s' found",q.name)
end
return openers.text_opener(specification,dfile,'zip')
elseif trace_locating then
- logs.report("fileio","zip opener, file '%s' not found",q.name)
+ report_resolvers("zip opener, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio","zip opener, unknown archive '%s'",zipspecification.path)
+ report_resolvers("zip opener, unknown archive '%s'",zipspecification.path)
end
end
end
if trace_locating then
- logs.report("fileio","zip opener, '%s' not found",filename)
+ report_resolvers("zip opener, '%s' not found",filename)
end
return unpack(openers.notfound)
end
@@ -11046,27 +11463,27 @@ function loaders.zip(specification)
local zfile = zip.openarchive(specification.path)
if zfile then
if trace_locating then
- logs.report("fileio","zip loader, archive '%s' opened",specification.path)
+ report_resolvers("zip loader, archive '%s' opened",specification.path)
end
local dfile = zfile:open(q.name)
if dfile then
logs.show_load(filename)
if trace_locating then
- logs.report("fileio","zip loader, file '%s' loaded",filename)
+ report_resolvers("zip loader, file '%s' loaded",filename)
end
local s = dfile:read("*all")
dfile:close()
return true, s, #s
elseif trace_locating then
- logs.report("fileio","zip loader, file '%s' not found",q.name)
+ report_resolvers("zip loader, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio","zip loader, unknown archive '%s'",specification.path)
+ report_resolvers("zip loader, unknown archive '%s'",specification.path)
end
end
end
if trace_locating then
- logs.report("fileio","zip loader, '%s' not found",filename)
+ report_resolvers("zip loader, '%s' not found",filename)
end
return unpack(openers.notfound)
end
@@ -11084,7 +11501,7 @@ function resolvers.usezipfile(zipname)
if z then
local instance = resolvers.instance
if trace_locating then
- logs.report("fileio","zip registering, registering archive '%s'",zipname)
+ report_resolvers("zip registering, registering archive '%s'",zipname)
end
statistics.starttiming(instance)
resolvers.prepend_hash('zip',zipname,zipfile)
@@ -11093,10 +11510,10 @@ function resolvers.usezipfile(zipname)
instance.files[zipname] = resolvers.register_zip_file(z,tree or "")
statistics.stoptiming(instance)
elseif trace_locating then
- logs.report("fileio","zip registering, unknown archive '%s'",zipname)
+ report_resolvers("zip registering, unknown archive '%s'",zipname)
end
elseif trace_locating then
- logs.report("fileio","zip registering, '%s' not found",zipname)
+ report_resolvers("zip registering, '%s' not found",zipname)
end
end
@@ -11108,7 +11525,7 @@ function resolvers.register_zip_file(z,tree)
filter = format("^%s/(.+)/(.-)$",tree)
end
if trace_locating then
- logs.report("fileio","zip registering, using filter '%s'",filter)
+ report_resolvers("zip registering, using filter '%s'",filter)
end
local register, n = resolvers.register_file, 0
for i in z:files() do
@@ -11125,7 +11542,7 @@ function resolvers.register_zip_file(z,tree)
n = n + 1
end
end
- logs.report("fileio","zip registering, %s files registered",n)
+ report_resolvers("zip registering, %s files registered",n)
return files
end
@@ -11134,6 +11551,93 @@ end -- of closure
do -- create closure to overcome 200 locals limit
+if not modules then modules = { } end modules ['data-tre'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- \input tree://oeps1/**/oeps.tex
+
+local find, gsub, format = string.find, string.gsub, string.format
+local unpack = unpack or table.unpack
+
+local report_resolvers = logs.new("resolvers")
+
+local done, found, notfound = { }, { }, resolvers.finders.notfound
+
+function resolvers.finders.tree(specification,filetype)
+ local fnd = found[specification]
+ if not fnd then
+ local spec = resolvers.splitmethod(specification).path or ""
+ if spec ~= "" then
+ local path, name = file.dirname(spec), file.basename(spec)
+ if path == "" then path = "." end
+ local hash = done[path]
+ if not hash then
+ local pattern = path .. "/*" -- we will use the proper splitter
+ hash = dir.glob(pattern)
+ done[path] = hash
+ end
+ local pattern = "/" .. gsub(name,"([%.%-%+])", "%%%1") .. "$"
+ for k=1,#hash do
+ local v = hash[k]
+ if find(v,pattern) then
+ found[specification] = v
+ return v
+ end
+ end
+ end
+ fnd = unpack(notfound) -- unpack ? why not just notfound[1]
+ found[specification] = fnd
+ end
+ return fnd
+end
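+-- a minimal usage sketch (hypothetical path, following the \input example above):
+--
+--~ local found = resolvers.finders.tree("tree://oeps1/**/oeps.tex")
+--~ -- the first lookup globs the directory part once (roughly "oeps1/**/*") and
+--~ -- caches it in 'done'; later lookups only match the escaped basename pattern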
+
+function resolvers.locators.tree(specification)
+ local spec = resolvers.splitmethod(specification)
+ local path = spec.path
+ if path ~= '' and lfs.isdir(path) then
+ if trace_locating then
+ report_resolvers("tree locator '%s' found (%s)",path,specification)
+ end
+ resolvers.append_hash('tree',specification,path,false) -- don't cache
+ elseif trace_locating then
+ report_resolvers("tree locator '%s' not found",path)
+ end
+end
+
+function resolvers.hashers.tree(tag,name)
+ if trace_locating then
+ report_resolvers("analysing tree '%s' as '%s'",name,tag)
+ end
+ -- todo: maybe share with done above
+ local spec = resolvers.splitmethod(tag)
+ local path = spec.path
+ resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
+end
+
+function resolvers.generators.tree(tag)
+ local spec = resolvers.splitmethod(tag)
+ local path = spec.path
+ resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
+end
+
+function resolvers.concatinators.tree(tag,path,name)
+ return file.join(tag,path,name)
+end
+
+resolvers.isreadable.tree = file.isreadable
+resolvers.openers.tree = resolvers.openers.generic
+resolvers.loaders.tree = resolvers.loaders.generic
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
if not modules then modules = { } end modules ['data-crl'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -11142,32 +11646,31 @@ if not modules then modules = { } end modules ['data-crl'] = {
license = "see context related readme files"
}
-local gsub = string.gsub
+-- this one is replaced by data-sch.lua --
curl = curl or { }
-curl.cached = { }
-curl.cachepath = caches.definepath("curl")
-
+local gsub = string.gsub
local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-function curl.fetch(protocol, name)
- local cachename = curl.cachepath() .. "/" .. gsub(name,"[^%a%d%.]+","-")
--- cachename = gsub(cachename,"[\\/]", io.fileseparator)
- cachename = gsub(cachename,"[\\]", "/") -- cleanup
- if not curl.cached[name] then
+local cached = { }
+
+function curl.fetch(protocol, name) -- todo: use socket library
+ local cleanname = gsub(name,"[^%a%d%.]+","-")
+ local cachename = caches.setfirstwritablefile(cleanname,"curl")
+ if not cached[name] then
if not io.exists(cachename) then
- curl.cached[name] = cachename
+ cached[name] = cachename
local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name -- no protocol .. "://"
os.spawn(command)
end
if io.exists(cachename) then
- curl.cached[name] = cachename
+ cached[name] = cachename
else
- curl.cached[name] = ""
+ cached[name] = ""
end
end
- return curl.cached[name]
+ return cached[name]
end
function finders.curl(protocol,filename)
@@ -11214,6 +11717,8 @@ if not modules then modules = { } end modules ['data-lua'] = {
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
local gsub, insert = string.gsub, table.insert
local unpack = unpack or table.unpack
@@ -11242,7 +11747,7 @@ local function thepath(...)
local t = { ... } t[#t+1] = "?.lua"
local path = file.join(unpack(t))
if trace_locating then
- logs.report("fileio","! appending '%s' to 'package.path'",path)
+ report_resolvers("! appending '%s' to 'package.path'",path)
end
return path
end
@@ -11264,11 +11769,11 @@ local function loaded(libpaths,name,simple)
local libpath = libpaths[i]
local resolved = gsub(libpath,"%?",simple)
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
+ report_resolvers("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
end
if resolvers.isreadable.file(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'package.path': '%s'",name,resolved)
+ report_resolvers("! lib '%s' located via 'package.path': '%s'",name,resolved)
end
return loadfile(resolved)
end
@@ -11278,17 +11783,17 @@ end
package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- more detail
- logs.report("fileio","! locating '%s'",name)
+ report_resolvers("! locating '%s'",name)
end
for i=1,#libformats do
local format = libformats[i]
local resolved = resolvers.find_file(name,format) or ""
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' using 'libformat path': '%s'",name,format)
+ report_resolvers("! checking for '%s' using 'libformat path': '%s'",name,format)
end
if resolved ~= "" then
if trace_locating then
- logs.report("fileio","! lib '%s' located via environment: '%s'",name,resolved)
+ report_resolvers("! lib '%s' located via environment: '%s'",name,resolved)
end
return loadfile(resolved)
end
@@ -11311,11 +11816,11 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
local path = paths[p]
local resolved = file.join(path,libname)
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' using 'clibformat path': '%s'",libname,path)
+ report_resolvers("! checking for '%s' using 'clibformat path': '%s'",libname,path)
end
if resolvers.isreadable.file(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'clibformat': '%s'",libname,resolved)
+ report_resolvers("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
end
return package.loadlib(resolved,name)
end
@@ -11325,28 +11830,28 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
local libpath = clibpaths[i]
local resolved = gsub(libpath,"?",simple)
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
+ report_resolvers("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
end
if resolvers.isreadable.file(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'package.cpath': '%s'",name,resolved)
+ report_resolvers("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
end
return package.loadlib(resolved,name)
end
end
-- just in case the distribution is messed up
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' using 'luatexlibs': '%s'",name)
+ report_resolvers("! checking for '%s' using 'luatexlibs': '%s'",name)
end
local resolved = resolvers.find_file(file.basename(name),'luatexlibs') or ""
if resolved ~= "" then
if trace_locating then
- logs.report("fileio","! lib '%s' located by basename via environment: '%s'",name,resolved)
+ report_resolvers("! lib '%s' located by basename via environment: '%s'",name,resolved)
end
return loadfile(resolved)
end
if trace_locating then
- logs.report("fileio",'? unable to locate lib: %s',name)
+ report_resolvers('? unable to locate lib: %s',name)
end
-- return "unable to locate " .. name
end
@@ -11358,113 +11863,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-kps'] = {
- version = 1.001,
- comment = "companion to luatools.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>This file is used when we want the input handlers to behave like
-<type>kpsewhich</type>. What to do with the following:</p>
-
-<typing>
-{$SELFAUTOLOC,$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}
-$SELFAUTOLOC : /usr/tex/bin/platform
-$SELFAUTODIR : /usr/tex/bin
-$SELFAUTOPARENT : /usr/tex
-</typing>
-
-<p>How about just forgetting about them?</p>
---ldx]]--
-
-local suffixes = resolvers.suffixes
-local formats = resolvers.formats
-
-suffixes['gf'] = { '<resolution>gf' }
-suffixes['pk'] = { '<resolution>pk' }
-suffixes['base'] = { 'base' }
-suffixes['bib'] = { 'bib' }
-suffixes['bst'] = { 'bst' }
-suffixes['cnf'] = { 'cnf' }
-suffixes['mem'] = { 'mem' }
-suffixes['mf'] = { 'mf' }
-suffixes['mfpool'] = { 'pool' }
-suffixes['mft'] = { 'mft' }
-suffixes['mppool'] = { 'pool' }
-suffixes['graphic/figure'] = { 'eps', 'epsi' }
-suffixes['texpool'] = { 'pool' }
-suffixes['PostScript header'] = { 'pro' }
-suffixes['ist'] = { 'ist' }
-suffixes['web'] = { 'web', 'ch' }
-suffixes['cweb'] = { 'w', 'web', 'ch' }
-suffixes['cmap files'] = { 'cmap' }
-suffixes['lig files'] = { 'lig' }
-suffixes['bitmap font'] = { }
-suffixes['MetaPost support'] = { }
-suffixes['TeX system documentation'] = { }
-suffixes['TeX system sources'] = { }
-suffixes['dvips config'] = { }
-suffixes['type42 fonts'] = { }
-suffixes['web2c files'] = { }
-suffixes['other text files'] = { }
-suffixes['other binary files'] = { }
-suffixes['opentype fonts'] = { 'otf' }
-
-suffixes['fmt'] = { 'fmt' }
-suffixes['texmfscripts'] = { 'rb','lua','py','pl' }
-
-suffixes['pdftex config'] = { }
-suffixes['Troff fonts'] = { }
-
-suffixes['ls-R'] = { }
-
---[[ldx--
-<p>If you wondered about some of the previous mappings, how about
-the next bunch:</p>
---ldx]]--
-
-formats['bib'] = ''
-formats['bst'] = ''
-formats['mft'] = ''
-formats['ist'] = ''
-formats['web'] = ''
-formats['cweb'] = ''
-formats['MetaPost support'] = ''
-formats['TeX system documentation'] = ''
-formats['TeX system sources'] = ''
-formats['Troff fonts'] = ''
-formats['dvips config'] = ''
-formats['graphic/figure'] = ''
-formats['ls-R'] = ''
-formats['other text files'] = ''
-formats['other binary files'] = ''
-
-formats['gf'] = ''
-formats['pk'] = ''
-formats['base'] = 'MFBASES'
-formats['cnf'] = ''
-formats['mem'] = 'MPMEMS'
-formats['mf'] = 'MFINPUTS'
-formats['mfpool'] = 'MFPOOL'
-formats['mppool'] = 'MPPOOL'
-formats['texpool'] = 'TEXPOOL'
-formats['PostScript header'] = 'TEXPSHEADERS'
-formats['cmap files'] = 'CMAPFONTS'
-formats['type42 fonts'] = 'T42FONTS'
-formats['web2c files'] = 'WEB2C'
-formats['pdftex config'] = 'PDFTEXCONFIG'
-formats['texmfscripts'] = 'TEXMFSCRIPTS'
-formats['bitmap font'] = ''
-formats['lig files'] = 'LIGFONTS'
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['data-aux'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -11474,49 +11872,52 @@ if not modules then modules = { } end modules ['data-aux'] = {
}
local find = string.find
+local type, next = type, next
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix
local scriptpath = "scripts/context/lua"
newname = file.addsuffix(newname,"lua")
local oldscript = resolvers.clean_path(oldname)
if trace_locating then
- logs.report("fileio","to be replaced old script %s", oldscript)
+ report_resolvers("to be replaced old script %s", oldscript)
end
local newscripts = resolvers.find_files(newname) or { }
if #newscripts == 0 then
if trace_locating then
- logs.report("fileio","unable to locate new script")
+ report_resolvers("unable to locate new script")
end
else
for i=1,#newscripts do
local newscript = resolvers.clean_path(newscripts[i])
if trace_locating then
- logs.report("fileio","checking new script %s", newscript)
+ report_resolvers("checking new script %s", newscript)
end
if oldscript == newscript then
if trace_locating then
- logs.report("fileio","old and new script are the same")
+ report_resolvers("old and new script are the same")
end
elseif not find(newscript,scriptpath) then
if trace_locating then
- logs.report("fileio","new script should come from %s",scriptpath)
+ report_resolvers("new script should come from %s",scriptpath)
end
elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
if trace_locating then
- logs.report("fileio","invalid new script name")
+ report_resolvers("invalid new script name")
end
else
local newdata = io.loaddata(newscript)
if newdata then
if trace_locating then
- logs.report("fileio","old script content replaced by new content")
+ report_resolvers("old script content replaced by new content")
end
io.savedata(oldscript,newdata)
break
elseif trace_locating then
- logs.report("fileio","unable to load new script")
+ report_resolvers("unable to load new script")
end
end
end
@@ -11536,70 +11937,116 @@ if not modules then modules = { } end modules ['data-tmf'] = {
license = "see context related readme files"
}
-local find, gsub, match = string.find, string.gsub, string.match
-local getenv, setenv = os.getenv, os.setenv
+-- = <<   set
+-- ? ??   set only when not yet set
+-- < +=   append
+-- > =+   prepend
--- loads *.tmf files in minimal tree roots (to be optimized and documented)
+function resolvers.load_tree(tree)
+ if type(tree) == "string" and tree ~= "" then
-function resolvers.check_environment(tree)
- logs.simpleline()
- setenv('TMP', getenv('TMP') or getenv('TEMP') or getenv('TMPDIR') or getenv('HOME'))
- setenv('TEXOS', getenv('TEXOS') or ("texmf-" .. os.platform))
- setenv('TEXPATH', gsub(tree or "tex","\/+$",''))
- setenv('TEXMFOS', getenv('TEXPATH') .. "/" .. getenv('TEXOS'))
- logs.simpleline()
- logs.simple("preset : TEXPATH => %s", getenv('TEXPATH'))
- logs.simple("preset : TEXOS => %s", getenv('TEXOS'))
- logs.simple("preset : TEXMFOS => %s", getenv('TEXMFOS'))
- logs.simple("preset : TMP => %s", getenv('TMP'))
- logs.simple('')
-end
+ local getenv, setenv = resolvers.getenv, resolvers.setenv
-function resolvers.load_environment(name) -- todo: key=value as well as lua
- local f = io.open(name)
- if f then
- for line in f:lines() do
- if find(line,"^[%%%#]") then
- -- skip comment
- else
- local key, how, value = match(line,"^(.-)%s*([<=>%?]+)%s*(.*)%s*$")
- if how then
- value = gsub(value,"%%(.-)%%", function(v) return getenv(v) or "" end)
- if how == "=" or how == "<<" then
- setenv(key,value)
- elseif how == "?" or how == "??" then
- setenv(key,getenv(key) or value)
- elseif how == "<" or how == "+=" then
- if getenv(key) then
- setenv(key,getenv(key) .. io.fileseparator .. value)
- else
- setenv(key,value)
- end
- elseif how == ">" or how == "=+" then
- if getenv(key) then
- setenv(key,value .. io.pathseparator .. getenv(key))
- else
- setenv(key,value)
- end
- end
- end
- end
+ -- later might listen to the raw osenv var as well
+ local texos = "texmf-" .. os.platform
+
+ local oldroot = environment.texroot
+ local newroot = file.collapse_path(tree)
+
+ local newtree = file.join(newroot,texos)
+ local newpath = file.join(newtree,"bin")
+
+ if not lfs.isdir(newtree) then
+ logs.simple("no '%s' under tree %s",texos,tree)
+ os.exit()
end
- f:close()
+ if not lfs.isdir(newpath) then
+ logs.simple("no '%s/bin' under tree %s",texos,tree)
+ os.exit()
+ end
+
+ local texmfos = newtree
+
+ environment.texroot = newroot
+ environment.texos = texos
+ environment.texmfos = texmfos
+
+ setenv('SELFAUTOPARENT', newroot)
+ setenv('SELFAUTODIR', newtree)
+ setenv('SELFAUTOLOC', newpath)
+ setenv('TEXROOT', newroot)
+ setenv('TEXOS', texos)
+ setenv('TEXMFOS', texmfos)
+ setenv('TEXROOT', newroot)
+ setenv('TEXMFCNF', resolvers.luacnfspec)
+ setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+
+ logs.simple("changing from root '%s' to '%s'",oldroot,newroot)
+ logs.simple("prepending '%s' to binary path",newpath)
+ logs.simple()
end
end
-function resolvers.load_tree(tree)
- if tree and tree ~= "" then
- local setuptex = 'setuptex.tmf'
- if lfs.attributes(tree, "mode") == "directory" then -- check if not nil
- setuptex = tree .. "/" .. setuptex
- else
- setuptex = tree
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-lst'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- used in mtxrun
+
+local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+
+resolvers.listers = resolvers.listers or { }
+
+local function tabstr(str)
+ if type(str) == 'table' then
+ return concat(str," | ")
+ else
+ return str
+ end
+end
+
+local function list(list,report,pattern)
+ pattern = pattern and pattern ~= "" and upper(pattern) or ""
+ local instance = resolvers.instance
+ local report = report or texio.write_nl
+ local sorted = table.sortedkeys(list)
+ for i=1,#sorted do
+ local key = sorted[i]
+ if pattern == "" or find(upper(key),pattern) then
+ report(format('%s %s=%s',instance.origins[key] or "---",key,tabstr(list[key])))
end
- if io.exists(setuptex) then
- resolvers.check_environment(tree)
- resolvers.load_environment(setuptex)
+ end
+end
+
+function resolvers.listers.variables (report,pattern) list(resolvers.instance.variables, report,pattern) end
+function resolvers.listers.expansions(report,pattern) list(resolvers.instance.expansions,report,pattern) end
+
+function resolvers.listers.configurations(report,pattern)
+ pattern = pattern and pattern ~= "" and upper(pattern) or ""
+ local report = report or texio.write_nl
+ local instance = resolvers.instance
+ local sorted = table.sortedkeys(instance.kpsevars)
+ for i=1,#sorted do
+ local key = sorted[i]
+ if pattern == "" or find(upper(key),pattern) then
+ report(format("%s\n",key))
+ local order = instance.order
+ for i=1,#order do
+ local str = order[i][key]
+ if str then
+ report(format("\t%s\t%s",i,str))
+ end
+ end
+ report("")
end
end
end
@@ -11708,111 +12155,140 @@ function states.get(key,default)
return states.get_by_tag(states.tag,key,default)
end
---~ states.data.update = {
---~ ["version"] = {
---~ ["major"] = 0,
---~ ["minor"] = 1,
---~ },
---~ ["rsync"] = {
---~ ["server"] = "contextgarden.net",
---~ ["module"] = "minimals",
---~ ["repository"] = "current",
---~ ["flags"] = "-rpztlv --stats",
---~ },
---~ ["tasks"] = {
---~ ["update"] = true,
---~ ["make"] = true,
---~ ["delete"] = false,
---~ },
---~ ["platform"] = {
---~ ["host"] = true,
---~ ["other"] = {
---~ ["mswin"] = false,
---~ ["linux"] = false,
---~ ["linux-64"] = false,
---~ ["osx-intel"] = false,
---~ ["osx-ppc"] = false,
---~ ["sun"] = false,
---~ },
---~ },
---~ ["context"] = {
---~ ["available"] = {"current", "beta", "alpha", "experimental"},
---~ ["selected"] = "current",
---~ },
---~ ["formats"] = {
---~ ["cont-en"] = true,
---~ ["cont-nl"] = true,
---~ ["cont-de"] = false,
---~ ["cont-cz"] = false,
---~ ["cont-fr"] = false,
---~ ["cont-ro"] = false,
---~ },
---~ ["engine"] = {
---~ ["pdftex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["pdftex"] = true,
---~ },
---~ },
---~ ["luatex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ },
---~ },
---~ ["xetex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["xetex"] = false,
---~ },
---~ },
---~ ["metapost"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["mpost"] = true,
---~ ["metafun"] = true,
---~ },
---~ },
---~ },
---~ ["fonts"] = {
---~ },
---~ ["doc"] = {
---~ },
---~ ["modules"] = {
---~ ["f-urwgaramond"] = false,
---~ ["f-urwgothic"] = false,
---~ ["t-bnf"] = false,
---~ ["t-chromato"] = false,
---~ ["t-cmscbf"] = false,
---~ ["t-cmttbf"] = false,
---~ ["t-construction-plan"] = false,
---~ ["t-degrade"] = false,
---~ ["t-french"] = false,
---~ ["t-lettrine"] = false,
---~ ["t-lilypond"] = false,
---~ ["t-mathsets"] = false,
---~ ["t-tikz"] = false,
---~ ["t-typearea"] = false,
---~ ["t-vim"] = false,
---~ },
---~ }
-
---~ states.save("teststate", "update")
---~ states.load("teststate", "update")
-
---~ print(states.get_by_tag("update","rsync.server","unknown"))
---~ states.set_by_tag("update","rsync.server","oeps")
---~ print(states.get_by_tag("update","rsync.server","unknown"))
---~ states.save("teststate", "update")
---~ states.load("teststate", "update")
---~ print(states.get_by_tag("update","rsync.server","unknown"))
+
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['luat-fmt'] = {
+ version = 1.001,
+ comment = "companion to mtxrun",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- helper for mtxrun
+
+function environment.make_format(name)
+ -- change to format path (early as we need expanded paths)
+ local olddir = lfs.currentdir()
+ local path = caches.getwritablepath("formats") or "" -- maybe platform
+ if path ~= "" then
+ lfs.chdir(path)
+ end
+ logs.simple("format path: %s",lfs.currentdir())
+ -- check source file
+ local texsourcename = file.addsuffix(name,"tex")
+ local fulltexsourcename = resolvers.find_file(texsourcename,"tex") or ""
+ if fulltexsourcename == "" then
+ logs.simple("no tex source file with name: %s",texsourcename)
+ lfs.chdir(olddir)
+ return
+ else
+ logs.simple("using tex source file: %s",fulltexsourcename)
+ end
+ local texsourcepath = dir.expand_name(file.dirname(fulltexsourcename)) -- really needed
+ -- check specification
+ local specificationname = file.replacesuffix(fulltexsourcename,"lus")
+ local fullspecificationname = resolvers.find_file(specificationname,"tex") or ""
+ if fullspecificationname == "" then
+ specificationname = file.join(texsourcepath,"context.lus")
+ fullspecificationname = resolvers.find_file(specificationname,"tex") or ""
+ end
+ if fullspecificationname == "" then
+ logs.simple("unknown stub specification: %s",specificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local specificationpath = file.dirname(fullspecificationname)
+ -- load specification
+ local usedluastub = nil
+ local usedlualibs = dofile(fullspecificationname)
+ if type(usedlualibs) == "string" then
+ usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs)
+ elseif type(usedlualibs) == "table" then
+ logs.simple("using stub specification: %s",fullspecificationname)
+ local texbasename = file.basename(name)
+ local luastubname = file.addsuffix(texbasename,"lua")
+ local lucstubname = file.addsuffix(texbasename,"luc")
+ -- pack libraries in stub
+ logs.simple("creating initialization file: %s",luastubname)
+ utils.merger.selfcreate(usedlualibs,specificationpath,luastubname)
+ -- compile stub file (does not save that much as we don't use this stub at startup any more)
+ local strip = resolvers.boolean_variable("LUACSTRIP", true)
+ if utils.lua.compile(luastubname,lucstubname,false,strip) and lfs.isfile(lucstubname) then
+ logs.simple("using compiled initialization file: %s",lucstubname)
+ usedluastub = lucstubname
+ else
+ logs.simple("using uncompiled initialization file: %s",luastubname)
+ usedluastub = luastubname
+ end
+ else
+ logs.simple("invalid stub specification: %s",fullspecificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ -- generate format
+ local q = string.quote
+ local command = string.format("luatex --ini --lua=%s %s %sdump",q(usedluastub),q(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
+ logs.simple("running command: %s\n",command)
+ os.spawn(command)
+ -- remove related mem files
+ local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
+ -- logs.simple("removing related mplib format with pattern '%s'", pattern)
+ local mp = dir.glob(pattern)
+ if mp then
+ for i=1,#mp do
+ local name = mp[i]
+ logs.simple("removing related mplib format %s", file.basename(name))
+ os.remove(name)
+ end
+ end
+ lfs.chdir(olddir)
+end
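+-- a rough sketch of what the above ends up spawning (hypothetical format name,
+-- paths shortened):
+--
+--~ environment.make_format("cont-en")
+--~ -- runs, on unix, something like:
+--~ --   luatex --ini --lua="cont-en.luc" ".../cont-en.tex" \\dump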
+
+function environment.run_format(name,data,more)
+ -- hm, rather old code here; we can now use the file.whatever functions
+ if name and name ~= "" then
+ local barename = file.removesuffix(name)
+ local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
+ if fmtname == "" then
+ fmtname = resolvers.find_file(file.addsuffix(barename,"fmt")) or ""
+ end
+ fmtname = resolvers.clean_path(fmtname)
+ if fmtname == "" then
+ logs.simple("no format with name: %s",name)
+ else
+ local barename = file.removesuffix(name) -- expanded name
+ local luaname = file.addsuffix(barename,"luc")
+ if not lfs.isfile(luaname) then
+ luaname = file.addsuffix(barename,"lua")
+ end
+ if not lfs.isfile(luaname) then
+ logs.simple("using format name: %s",fmtname)
+ logs.simple("no luc/lua with name: %s",barename)
+ else
+ local q = string.quote
+ local command = string.format("luatex --fmt=%s --lua=%s %s %s",q(barename),q(luaname),q(data),more ~= "" and q(more) or "")
+ logs.simple("running command: %s",command)
+ os.spawn(command)
+ end
+ end
+ end
+end
end -- of closure
-- end library merge
-own = { } -- not local
+own = { } -- not local, might change
+
+own.libs = { -- order can be made better
-own.libs = { -- todo: check which ones are really needed
'l-string.lua',
'l-lpeg.lua',
'l-table.lua',
@@ -11825,24 +12301,32 @@ own.libs = { -- todo: check which ones are really needed
'l-url.lua',
'l-dir.lua',
'l-boolean.lua',
+ 'l-unicode.lua',
'l-math.lua',
--- 'l-unicode.lua',
--- 'l-tex.lua',
'l-utils.lua',
'l-aux.lua',
--- 'l-xml.lua',
+
+ 'trac-inf.lua',
+ 'trac-set.lua',
'trac-tra.lua',
+ 'trac-log.lua',
+ 'trac-pro.lua',
+ 'luat-env.lua', -- can come before inf (as in mkiv)
+
'lxml-tab.lua',
'lxml-lpt.lua',
--- 'lxml-ent.lua',
+ -- 'lxml-ent.lua',
'lxml-mis.lua',
'lxml-aux.lua',
'lxml-xml.lua',
- 'luat-env.lua',
- 'trac-inf.lua',
- 'trac-log.lua',
- 'data-res.lua',
+
+
+ 'data-ini.lua',
+ 'data-exp.lua',
+ 'data-env.lua',
'data-tmp.lua',
+ 'data-met.lua',
+ 'data-res.lua',
'data-pre.lua',
'data-inp.lua',
'data-out.lua',
@@ -11851,13 +12335,15 @@ own.libs = { -- todo: check which ones are really needed
-- 'data-tex.lua',
-- 'data-bin.lua',
'data-zip.lua',
+ 'data-tre.lua',
'data-crl.lua',
'data-lua.lua',
- 'data-kps.lua', -- so that we can replace kpsewhich
'data-aux.lua', -- updater
- 'data-tmf.lua', -- tree files
- -- needed ?
- 'luat-sta.lua', -- states
+ 'data-tmf.lua',
+ 'data-lst.lua',
+
+ 'luat-sta.lua',
+ 'luat-fmt.lua',
}
-- We need this hack till luatex is fixed.
@@ -11870,36 +12356,61 @@ end
-- End of hack.
-own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua'
+own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
+own.path = string.gsub(string.match(own.name,"^(.+)[\\/].-$") or ".","\\","/")
+
+local ownpath, owntree = own.path, environment and environment.ownpath or own.path
+
+own.list = {
+ '.',
+ ownpath ,
+ ownpath .. "/../sources", -- HH's development path
+ owntree .. "/../../texmf-local/tex/context/base",
+ owntree .. "/../../texmf-context/tex/context/base",
+ owntree .. "/../../texmf-dist/tex/context/base",
+ owntree .. "/../../texmf/tex/context/base",
+ owntree .. "/../../../texmf-local/tex/context/base",
+ owntree .. "/../../../texmf-context/tex/context/base",
+ owntree .. "/../../../texmf-dist/tex/context/base",
+ owntree .. "/../../../texmf/tex/context/base",
+}
+if own.path == "." then table.remove(own.list,1) end
-own.path = string.match(own.name,"^(.+)[\\/].-$") or "."
-own.list = { '.' }
-if own.path ~= '.' then
- table.insert(own.list,own.path)
+local function locate_libs()
+ for l=1,#own.libs do
+ local lib = own.libs[l]
+ for p =1,#own.list do
+ local pth = own.list[p]
+ local filename = pth .. "/" .. lib
+ local found = lfs.isfile(filename)
+ if found then
+ return pth
+ end
+ end
+ end
end
-table.insert(own.list,own.path.."/../../../tex/context/base")
-table.insert(own.list,own.path.."/mtx")
-table.insert(own.list,own.path.."/../sources")
-local function locate_libs()
- for _, lib in pairs(own.libs) do
- for _, pth in pairs(own.list) do
- local filename = string.gsub(pth .. "/" .. lib,"\\","/")
+local function load_libs()
+ local found = locate_libs()
+ if found then
+ for l=1,#own.libs do
+ local filename = found .. "/" .. own.libs[l]
local codeblob = loadfile(filename)
if codeblob then
codeblob()
- own.list = { pth } -- speed up the search
- break
end
end
+ else
+ resolvers = nil
end
end
if not resolvers then
- locate_libs()
+ load_libs()
end
+
if not resolvers then
print("")
print("Mtxrun is unable to start up due to lack of libraries. You may")
@@ -11909,7 +12420,11 @@ if not resolvers then
os.exit()
end
-logs.setprogram('MTXrun',"TDS Runner Tool 1.24",environment.arguments["verbose"] or false)
+logs.setprogram('MTXrun',"TDS Runner Tool 1.26")
+
+if environment.arguments["verbose"] then
+ trackers.enable("resolvers.locating")
+end
local instance = resolvers.reset()
@@ -11937,8 +12452,8 @@ messages.help = [[
--ifchanged=filename only execute when given file has changed (md5 checksum)
--iftouched=old,new only execute when given file has changed (time stamp)
---make create stubs for (context related) scripts
---remove remove stubs (context related) scripts
+--makestubs create stubs for (context related) scripts
+--removestubs remove stubs (context related) scripts
--stubpath=binpath paths where stubs will be written
--windows create windows (mswin) stubs
--unix create unix (linux) stubs
@@ -11958,8 +12473,24 @@ messages.help = [[
--forcekpse force using kpse (handy when no mkiv and cache installed but less functionality)
--prefixes show supported prefixes
+
+--generate generate file database
+
+--variables show configuration variables
+--expansions show expanded variables
+--configurations show configuration order
+--expand-braces expand complex variable
+--expand-path expand variable (resolve paths)
+--expand-var expand variable (resolve references)
+--show-path show path expansion of ...
+--var-value report value of variable
+--find-file report file location
+--find-path report path of file
+
+--pattern=str filter variables
]]
+
runners.applications = {
["lua"] = "luatex --luaonly",
["luc"] = "luatex --luaonly",
@@ -12012,45 +12543,40 @@ end
function runners.prepare()
local checkname = environment.argument("ifchanged")
- if checkname and checkname ~= "" then
+ local verbose = environment.argument("verbose")
+ if type(checkname) == "string" and checkname ~= "" then
local oldchecksum = file.loadchecksum(checkname)
local newchecksum = file.checksum(checkname)
if oldchecksum == newchecksum then
- logs.simple("file '%s' is unchanged",checkname)
+ if verbose then
+ logs.simple("file '%s' is unchanged",checkname)
+ end
return "skip"
- else
+ elseif verbose then
logs.simple("file '%s' is changed, processing started",checkname)
end
file.savechecksum(checkname)
end
- local oldname, newname = string.split(environment.argument("iftouched") or "", ",")
- if oldname and newname and oldname ~= "" and newname ~= "" then
- if not file.needs_updating(oldname,newname) then
- logs.simple("file '%s' and '%s' have same age",oldname,newname)
- return "skip"
- else
- logs.simple("file '%s' is older than '%s'",oldname,newname)
- end
- end
- local tree = environment.argument('tree') or ""
- if environment.argument('autotree') then
- tree = os.getenv('TEXMFSTART_TREE') or os.getenv('TEXMFSTARTTREE') or tree
- end
- if tree and tree ~= "" then
- resolvers.load_tree(tree)
- end
- local env = environment.argument('environment') or ""
- if env and env ~= "" then
- for _,e in pairs(string.split(env)) do
- -- maybe force suffix when not given
- resolvers.load_tree(e)
+ local touchname = environment.argument("iftouched")
+ if type(touchname) == "string" and touchname ~= "" then
+ local oldname, newname = string.split(touchname, ",")
+ if oldname and newname and oldname ~= "" and newname ~= "" then
+ if not file.needs_updating(oldname,newname) then
+ if verbose then
+ logs.simple("file '%s' and '%s' have same age",oldname,newname)
+ end
+ return "skip"
+ elseif verbose then
+ logs.simple("file '%s' is older than '%s'",oldname,newname)
+ end
end
end
local runpath = environment.argument("path")
- if runpath and not lfs.chdir(runpath) then
+ if type(runpath) == "string" and not lfs.chdir(runpath) then
logs.simple("unable to change to path '%s'",runpath)
return "error"
end
+ runners.prepare = function() end
return "run"
end
@@ -12165,7 +12691,7 @@ function runners.execute_program(fullname)
return false
end
--- the --usekpse flag will fallback on kpse (hm, we can better update mtx-stubs)
+-- the --usekpse flag will fall back (not default) on kpse (hm, we can better update mtx-stubs)
local windows_stub = '@echo off\013\010setlocal\013\010set ownpath=%%~dp0%%\013\010texlua "%%ownpath%%mtxrun.lua" --usekpse --execute %s %%*\013\010endlocal\013\010'
local unix_stub = '#!/bin/sh\010mtxrun --usekpse --execute %s \"$@\"\010'
@@ -12288,7 +12814,7 @@ end
function runners.launch_file(filename)
instance.allresults = true
- logs.setverbose(true)
+ trackers.enable("resolvers.locating")
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -12368,7 +12894,19 @@ function runners.find_mtx_script(filename)
return fullname
end
-function runners.execute_ctx_script(filename)
+function runners.register_arguments(...)
+ local arguments = environment.arguments_after
+ local passedon = { ... }
+ for i=#passedon,1,-1 do
+ local pi = passedon[i]
+ if pi then
+ table.insert(arguments,1,pi)
+ end
+ end
+end
+
+function runners.execute_ctx_script(filename,...)
+ runners.register_arguments(...)
local arguments = environment.arguments_after
local fullname = runners.find_mtx_script(filename) or ""
if file.extname(fullname) == "cld" then
@@ -12381,7 +12919,7 @@ function runners.execute_ctx_script(filename)
-- retry after generate but only if --autogenerate
if fullname == "" and environment.argument("autogenerate") then -- might become the default
instance.renewcache = true
- logs.setverbose(true)
+ trackers.enable("resolvers.locating")
resolvers.load()
--
fullname = runners.find_mtx_script(filename) or ""
@@ -12421,10 +12959,9 @@ function runners.execute_ctx_script(filename)
return true
end
else
- -- logs.setverbose(true)
if filename == "" or filename == "help" then
local context = resolvers.find_file("mtx-context.lua")
- logs.setverbose(true)
+ trackers.enable("resolvers.locating")
if context ~= "" then
local result = dir.glob((string.gsub(context,"mtx%-context","mtx-*"))) -- () needed
local valid = { }
@@ -12558,80 +13095,317 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
end
+ function runners.loadbase()
+ end
+
else
- resolvers.load()
+ function runners.loadbase(...)
+ if not resolvers.load(...) then
+ logs.simple("forcing cache reload")
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ if not resolvers.load(...) then
+ logs.simple("the resolver databases are not present or outdated")
+ end
+ end
+ end
end
+resolvers.load_tree(environment.argument('tree'))
+
if environment.argument("selfmerge") then
+
-- embed used libraries
- utils.merger.selfmerge(own.name,own.libs,own.list)
+
+ runners.loadbase()
+ local found = locate_libs()
+ if found then
+ utils.merger.selfmerge(own.name,own.libs,{ found })
+ end
+
elseif environment.argument("selfclean") then
+
-- remove embedded libraries
+
+ runners.loadbase()
utils.merger.selfclean(own.name)
+
elseif environment.argument("selfupdate") then
- logs.setverbose(true)
+
+ runners.loadbase()
+ trackers.enable("resolvers.locating")
resolvers.update_script(own.name,"mtxrun")
+
elseif environment.argument("ctxlua") or environment.argument("internal") then
+
-- run a script by loading it (using libs)
+
+ runners.loadbase()
ok = runners.execute_script(filename,true)
+
elseif environment.argument("script") or environment.argument("scripts") then
+
-- run a script by loading it (using libs), pass args
+
+ runners.loadbase()
if is_mkii_stub then
- -- execute mkii script
ok = runners.execute_script(filename,false,true)
else
ok = runners.execute_ctx_script(filename)
end
+
elseif environment.argument("execute") then
+
-- execute script
+
+ runners.loadbase()
ok = runners.execute_script(filename)
+
elseif environment.argument("direct") then
+
-- equals bin:
+
+ runners.loadbase()
ok = runners.execute_program(filename)
+
elseif environment.argument("edit") then
+
-- edit file
+
+ runners.loadbase()
runners.edit_script(filename)
+
elseif environment.argument("launch") then
+
+ runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("make") then
- -- make stubs
+
+elseif environment.argument("makestubs") then
+
+ -- make stubs (deprecated)
+
runners.handle_stubs(true)
-elseif environment.argument("remove") then
- -- remove stub
+
+elseif environment.argument("removestubs") then
+
+ -- remove stub (deprecated)
+
+ runners.loadbase()
runners.handle_stubs(false)
+
elseif environment.argument("resolve") then
+
-- resolve string
+
+ runners.loadbase()
runners.resolve_string(filename)
+
elseif environment.argument("locate") then
+
-- locate file
+
+ runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform")then
+
+elseif environment.argument("platform") or environment.argument("show-platform") then
+
-- locate platform
+
+ runners.loadbase()
runners.locate_platform()
+
elseif environment.argument("prefixes") then
+
+ runners.loadbase()
runners.prefixes()
+
elseif environment.argument("timedrun") then
+
-- locate platform
+
+ runners.loadbase()
runners.timedrun(filename)
+
+elseif environment.argument("variables") or environment.argument("show-variables") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--variables",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.variables(false,environment.argument("pattern"))
+
+elseif environment.argument("expansions") or environment.argument("show-expansions") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.expansions(false,environment.argument("pattern"))
+
+elseif environment.argument("configurations") or environment.argument("show-configurations") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.configurations(false,environment.argument("pattern"))
+
+elseif environment.argument("find-file") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
+
+ resolvers.load()
+ local pattern = environment.argument("pattern")
+ local format = environment.arguments["format"] or instance.format
+ if not pattern then
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.find_files,environment.files,format)
+ elseif type(pattern) == "string" then
+ instance.allresults = true -- brrrr
+ resolvers.for_files(resolvers.find_files,{ pattern }, format)
+ end
+
+elseif environment.argument("find-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
+
+ resolvers.load()
+ local path = resolvers.find_path(filename, instance.my_format)
+ if logs.verbose then
+ logs.simple(path)
+ else
+ print(path)
+ end
+
+elseif environment.argument("expand-braces") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.expand_braces, environment.files)
+
+elseif environment.argument("expand-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.expand_path, environment.files)
+
+elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.expand_var, environment.files)
+
+elseif environment.argument("show-path") or environment.argument("path-value") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.show_path, environment.files)
+
+elseif environment.argument("var-value") or environment.argument("show-value") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.var_value,environment.files)
+
+elseif environment.argument("format-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
+
+ resolvers.load()
+ logs.simple(caches.getwritablepath("format"))
+
+elseif environment.argument("pattern") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+
+elseif environment.argument("generate") then
+
+ -- luatools
+
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+
+elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
+
+ resolvers.load()
+ trackers.enable("resolvers.locating")
+ environment.make_format(filename)
+
+elseif environment.argument("run") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--run",filename)
+
+elseif environment.argument("fmt") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--fmt",filename)
+
+elseif environment.argument("help") and filename=='base' then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--help")
+
elseif environment.argument("help") or filename=='help' or filename == "" then
+
logs.help(messages.help)
- -- execute script
+
elseif filename:find("^bin:") then
+
+ runners.loadbase()
ok = runners.execute_program(filename)
+
elseif is_mkii_stub then
+
-- execute mkii script
+
+ runners.loadbase()
ok = runners.execute_script(filename,false,true)
-else
+
+elseif false then
+
+ runners.loadbase()
ok = runners.execute_ctx_script(filename)
if not ok then
ok = runners.execute_script(filename)
end
+
+else
+
+ runners.execute_ctx_script("mtx-base",filename)
+
+end
+
+if logs.verbose then
+ logs.simpleline()
+ logs.simple("runtime: %0.3f seconds",os.runtime())
end
-if os.platform == "unix" then
- io.write("\n")
+if os.type ~= "windows" then
+ texio.write("\n")
end
if ok == false then ok = 1 elseif ok == true then ok = 0 end
diff --git a/scripts/context/perl/mptopdf.pl b/scripts/context/perl/mptopdf.pl
index 41d1ae1f7..ec08c5306 100644
--- a/scripts/context/perl/mptopdf.pl
+++ b/scripts/context/perl/mptopdf.pl
@@ -5,7 +5,7 @@ eval '(exit $?0)' && eval 'exec perl -S $0 ${1+"$@"}' && eval 'exec perl -S $0 $
#D \module
#D [ file=mptopdf.pl,
-#D version=2000.05.29,
+#D version=2010.05.28, % 2000.05.29
#D title=converting MP to PDF,
#D subtitle=\MPTOPDF,
#D author=Hans Hagen,
@@ -27,17 +27,20 @@ use File::Basename ;
$Getopt::Long::passthrough = 1 ; # no error message
$Getopt::Long::autoabbrev = 1 ; # partial switch accepted
-my $Help = my $Latex = my $RawMP = my $MetaFun = 0 ;
+my $Help = 0;
+my $Latex = 0;
+my $RawMP = 1;
+my $MetaFun = 0 ;
my $PassOn = '' ;
&GetOptions
( "help" => \$Help ,
- "rawmp" => \$RawMP,
+ "rawmp" => \$RawMP, # option is now default, but keep for compat
"metafun" => \$MetaFun,
- "passon" => \$PassOn,
+ "passon" => \$PassOn, # option is ignored, but keep for compat
"latex" => \$Latex ) ;
-my $program = "MPtoPDF 1.3.3" ;
+my $program = "MPtoPDF 1.4.0" ;
my $pattern = "@ARGV" ; # was $ARGV[0]
my $miktex = 0 ;
my $done = 0 ;
@@ -82,22 +85,13 @@ if (($pattern eq '')||($Help)) {
}
close (INP) ;
}
- if ($RawMP) {
- if ($Latex) {
- $rest .= " $mplatexswitch" ;
- }
- if ($MetaFun) {
- $mpbin = "mpost --progname=mpost --mem=metafun" ;
- } else {
- $mpbin = "mpost --mem=mpost" ;
- }
+ if ($Latex) {
+ $rest .= " $mplatexswitch" ;
+ }
+ if ($MetaFun) {
+ $mpbin = "mpost --progname=mpost --mem=metafun" ;
} else {
- if ($Latex) {
- $rest .= " $mplatexswitch" ;
- $mpbin = "mpost --mem=mpost" ;
- } else {
- $mpbin = "texexec --mptex $PassOn " ;
- }
+ $mpbin = "mpost --mem=mpost" ;
}
my $runner = "$mpbin $rest $pattern" ;
print "\n$program : running '$runner'\n" ;
diff --git a/scripts/context/ruby/base/kpse.rb b/scripts/context/ruby/base/kpse.rb
index 0f9868784..313ebbe62 100644
--- a/scripts/context/ruby/base/kpse.rb
+++ b/scripts/context/ruby/base/kpse.rb
@@ -351,15 +351,25 @@ module Kpse
end
end
+ # def Kpse.runscript(name,filename=[],options=[])
+ # setscript(name,`texmfstart --locate #{name}`) unless @@scripts.key?(name)
+ # cmd = "#{@@scripts[name]} #{[options].flatten.join(' ')} #{[filename].flatten.join(' ')}"
+ # system(cmd)
+ # end
+
+ # def Kpse.pipescript(name,filename=[],options=[])
+ # setscript(name,`texmfstart --locate #{name}`) unless @@scripts.key?(name)
+ # cmd = "#{@@scripts[name]} #{[options].flatten.join(' ')} #{[filename].flatten.join(' ')}"
+ # `#{cmd}`
+ # end
+
def Kpse.runscript(name,filename=[],options=[])
- setscript(name,`texmfstart --locate #{name}`) unless @@scripts.key?(name)
- cmd = "#{@@scripts[name]} #{[options].flatten.join(' ')} #{[filename].flatten.join(' ')}"
+ cmd = "mtxrun --script #{name} #{[options].flatten.join(' ')} #{[filename].flatten.join(' ')}"
system(cmd)
end
def Kpse.pipescript(name,filename=[],options=[])
- setscript(name,`texmfstart --locate #{name}`) unless @@scripts.key?(name)
- cmd = "#{@@scripts[name]} #{[options].flatten.join(' ')} #{[filename].flatten.join(' ')}"
+ cmd = "mtxrun --script #{name} #{[options].flatten.join(' ')} #{[filename].flatten.join(' ')}"
`#{cmd}`
end
diff --git a/scripts/context/stubs/mswin/mtxrun.dll b/scripts/context/stubs/mswin/mtxrun.dll
index 23e476cac..4116c5a24 100644
--- a/scripts/context/stubs/mswin/mtxrun.dll
+++ b/scripts/context/stubs/mswin/mtxrun.dll
Binary files differ
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index b99327692..46db66493 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -38,8 +38,6 @@ if not modules then modules = { } end modules ['mtxrun'] = {
-- remember for subruns: _CTX_K_S_#{original}_
-- remember for subruns: TEXMFSTART.#{original} [tex.rb texmfstart.rb]
-texlua = true
-
-- begin library merge
@@ -97,13 +95,6 @@ function string:unquote()
return (gsub(self,"^([\"\'])(.*)%1$","%2"))
end
---~ function string:unquote()
---~ if find(self,"^[\'\"]") then
---~ return sub(self,2,-2)
---~ else
---~ return self
---~ end
---~ end
function string:quote() -- we could use format("%q")
return format("%q",self)
@@ -126,11 +117,6 @@ function string:limit(n,sentinel)
end
end
---~ function string:strip() -- the .- is quite efficient
---~ -- return match(self,"^%s*(.-)%s*$") or ""
---~ -- return match(self,'^%s*(.*%S)') or '' -- posted on lua list
---~ return find(s,'^%s*$') and '' or match(s,'^%s*(.*%S)')
---~ end
do -- roberto's variant:
local space = lpeg.S(" \t\v\n")
@@ -217,13 +203,6 @@ function is_number(str) -- tonumber
return find(str,"^[%-%+]?[%d]-%.?[%d+]$") == 1
end
---~ print(is_number("1"))
---~ print(is_number("1.1"))
---~ print(is_number(".1"))
---~ print(is_number("-0.1"))
---~ print(is_number("+0.1"))
---~ print(is_number("-.1"))
---~ print(is_number("+.1"))
function string:split_settings() -- no {} handling, see l-aux for lpeg variant
if find(self,"=") then
@@ -278,18 +257,6 @@ function string:totable()
return lpegmatch(pattern,self)
end
---~ local t = {
---~ "1234567123456712345671234567",
---~ "a\tb\tc",
---~ "aa\tbb\tcc",
---~ "aaa\tbbb\tccc",
---~ "aaaa\tbbbb\tcccc",
---~ "aaaaa\tbbbbb\tccccc",
---~ "aaaaaa\tbbbbbb\tcccccc",
---~ }
---~ for k,v do
---~ print(string.tabtospace(t[k]))
---~ end
function string.tabtospace(str,tab)
-- we don't handle embedded newlines
@@ -390,6 +357,11 @@ patterns.whitespace = patterns.eol + patterns.spacer
patterns.nonwhitespace = 1 - patterns.whitespace
patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191')
+patterns.validutf8 = patterns.utf8^0 * P(-1) * Cc(true) + Cc(false)
+
+patterns.undouble = P('"')/"" * (1-P('"'))^0 * P('"')/""
+patterns.unsingle = P("'")/"" * (1-P("'"))^0 * P("'")/""
+patterns.unspacer = ((patterns.spacer^1)/"")^0
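+
+A usage sketch (not part of the patch) of the helper patterns added above; the sample strings are made up and patterns is assumed to be lpeg.patterns as elsewhere in this file:
+
+    local lpegmatch, patterns, Cs = lpeg.match, lpeg.patterns, lpeg.Cs
+    print(lpegmatch(patterns.validutf8,"hello"))        -- true  (well formed utf-8)
+    print(lpegmatch(patterns.validutf8,"\255\254"))     -- false (invalid lead byte)
+    print(lpegmatch(Cs(patterns.undouble),'"quoted"'))  -- quoted (double quotes stripped)
+    print(lpegmatch(Cs(patterns.unsingle),"'quoted'"))  -- quoted (single quotes stripped)
+    -- patterns.unspacer is meant to be composed with other patterns to eat optional spacing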
function lpeg.anywhere(pattern) --slightly adapted from website
return P { P(pattern) + 1 * V(1) } -- why so complex?
@@ -412,10 +384,6 @@ end
patterns.textline = content
---~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more
---~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more
---~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps
---~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps
local splitters_s, splitters_m = { }, { }
@@ -484,19 +452,7 @@ function string:checkedsplit(separator)
return match(c,self)
end
---~ function lpeg.append(list,pp)
---~ local p = pp
---~ for l=1,#list do
---~ if p then
---~ p = p + P(list[l])
---~ else
---~ p = P(list[l])
---~ end
---~ end
---~ return p
---~ end
---~ from roberto's site:
local f1 = string.byte
@@ -506,6 +462,53 @@ local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 6
patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+local cache = { }
+
+function lpeg.stripper(str)
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+end
+
+function lpeg.replacer(t)
+ if #t > 0 then
+ local p
+ for i=1,#t do
+ local ti= t[i]
+ local pp = P(ti[1]) / ti[2]
+ p = (p and p + pp ) or pp
+ end
+ return Cs((p + 1)^0)
+ end
+end
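+
+A usage sketch (not part of the patch) of the stripper/replacer helpers above; the sample data is hypothetical:
+
+    local strip = lpeg.stripper("0123456789")
+    print(lpeg.match(strip,"a1b2c3"))                  -- abc  (digits removed)
+    local swap = lpeg.replacer { { "\\", "/" } }
+    print(lpeg.match(swap,"a\\b\\c"))                  -- a/b/c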
+
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(P(1)^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
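+
+A usage sketch (not part of the patch) of the two splitters, with made up input:
+
+    local first  = lpeg.firstofsplit(",")
+    local second = lpeg.secondofsplit(",")
+    print(lpeg.match(first ,"a,b,c"))   -- a
+    print(lpeg.match(first ,"abc"))     -- abc  (always returns a value)
+    print(lpeg.match(second,"a,b,c"))   -- b,c
+    print(lpeg.match(second,"abc"))     -- nil  (nothing to split)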
+
+
end -- of closure
@@ -783,9 +786,6 @@ function table.one_entry(t) -- obsolete, use inline code instead
return n and not next(t,n)
end
---~ function table.starts_at(t) -- obsolete, not nice anyway
---~ return ipairs(t,1)(t,0)
---~ end
function table.tohash(t,value)
local h = { }
@@ -806,12 +806,6 @@ function table.fromhash(t)
return h
end
---~ print(table.serialize(t), "\n")
---~ print(table.serialize(t,"name"), "\n")
---~ print(table.serialize(t,false), "\n")
---~ print(table.serialize(t,true), "\n")
---~ print(table.serialize(t,"name",true), "\n")
---~ print(table.serialize(t,"name",true,true), "\n")
table.serialize_functions = true
table.serialize_compact = true
@@ -871,8 +865,7 @@ local function do_serialize(root,name,depth,level,indexed)
if indexed then
handle(format("%s{",depth))
elseif name then
- --~ handle(format("%s%s={",depth,key(name)))
- if type(name) == "number" then -- or find(k,"^%d+$") then
+ if type(name) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s[0x%04X]={",depth,name))
else
@@ -901,10 +894,8 @@ local function do_serialize(root,name,depth,level,indexed)
for i=1,#sk do
local k = sk[i]
local v = root[k]
- --~ if v == root then
- -- circular
- --~ else
- local t = type(v)
+ -- circular
+ local t = type(v)
if compact and first and type(k) == "number" and k >= first and k <= last then
if t == "number" then
if hexify then
@@ -947,12 +938,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s __p__=nil,",depth))
end
elseif t == "number" then
- --~ if hexify then
- --~ handle(format("%s %s=0x%04X,",depth,key(k),v))
- --~ else
- --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g
- --~ end
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
else
@@ -973,8 +959,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "string" then
if reduce and tonumber(v) then
- --~ handle(format("%s %s=%s,",depth,key(k),v))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%s,",depth,k,v))
else
@@ -986,8 +971,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%q]=%s,",depth,k,v))
end
else
- --~ handle(format("%s %s=%q,",depth,key(k),v))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%q,",depth,k,v))
else
@@ -1001,8 +985,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "table" then
if not next(v) then
- --~ handle(format("%s %s={},",depth,key(k)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]={},",depth,k))
else
@@ -1016,8 +999,7 @@ local function do_serialize(root,name,depth,level,indexed)
elseif inline then
local st = simple_table(v)
if st then
- --~ handle(format("%s %s={ %s },",depth,key(k),concat(st,", ")))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
else
@@ -1035,8 +1017,7 @@ local function do_serialize(root,name,depth,level,indexed)
do_serialize(v,k,depth,level+1)
end
elseif t == "boolean" then
- --~ handle(format("%s %s=%s,",depth,key(k),tostring(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
else
@@ -1049,8 +1030,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
- --~ handle(format('%s %s=loadstring(%q),',depth,key(k),dump(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
else
@@ -1063,8 +1043,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
end
else
- --~ handle(format("%s %s=%q,",depth,key(k),tostring(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
else
@@ -1076,8 +1055,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%q]=%q,",depth,k,tostring(v)))
end
end
- --~ end
- end
+ end
end
if level > 0 then
handle(format("%s},",depth))
@@ -1118,19 +1096,11 @@ local function serialize(root,name,_handle,_reduce,_noquotes,_hexify)
handle("t={")
end
if root and next(root) then
- do_serialize(root,name,"",0,indexed)
+ do_serialize(root,name,"",0)
end
handle("}")
end
---~ name:
---~
---~ true : return { }
---~ false : { }
---~ nil : t = { }
---~ string : string = { }
---~ 'return' : return { }
---~ number : [number] = { }
function table.serialize(root,name,reduce,noquotes,hexify)
local t = { }
@@ -1353,9 +1323,6 @@ function table.swapped(t)
return s
end
---~ function table.are_equal(a,b)
---~ return table.serialize(a) == table.serialize(b)
---~ end
function table.clone(t,p) -- t is optional or nil or table
if not p then
@@ -1421,6 +1388,17 @@ function table.insert_after_value(t,value,extra)
insert(t,#t+1,extra)
end
+function table.sequenced(t,sep)
+ local s = { }
+ for k, v in next, t do -- indexed?
+ s[#s+1] = k .. "=" .. tostring(v)
+ end
+ return concat(s, sep or " | ")
+end
+
+function table.print(...)
+ print(table.serialize(...))
+end
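+
+A usage sketch (not part of the patch) of the two small table helpers added above; the data is made up:
+
+    local t = { mode = "draft", pages = 12 }
+    print(table.sequenced(t))        -- mode=draft | pages=12 (hash order, so possibly swapped)
+    print(table.sequenced(t,", "))   -- mode=draft, pages=12
+    table.print(t)                   -- shorthand for print(table.serialize(t))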
end -- of closure
@@ -1756,17 +1734,6 @@ function set.contains(n,s)
end
end
---~ local c = set.create{'aap','noot','mies'}
---~ local s = set.tonumber(c)
---~ local t = set.totable(s)
---~ print(t['aap'])
---~ local c = set.create{'zus','wim','jet'}
---~ local s = set.tonumber(c)
---~ local t = set.totable(s)
---~ print(t['aap'])
---~ print(t['jet'])
---~ print(set.contains(t,'jet'))
---~ print(set.contains(t,'aap'))
@@ -1784,29 +1751,97 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
-local find, format, gsub = string.find, string.format, string.gsub
+local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
local random, ceil = math.random, math.ceil
+local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
+
+-- The following code permits traversing the environment table, at least
+-- in luatex. Internally all environment names are uppercase.
+
+if not os.__getenv__ then
+
+ os.__getenv__ = os.getenv
+ os.__setenv__ = os.setenv
+
+ if os.env then
-local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush
+ local osgetenv = os.getenv
+ local ossetenv = os.setenv
+ local osenv = os.env local _ = osenv.PATH -- initialize the table
+
+ function os.setenv(k,v)
+ if v == nil then
+ v = ""
+ end
+ local K = upper(k)
+ osenv[K] = v
+ ossetenv(K,v)
+ end
+
+ function os.getenv(k)
+ local K = upper(k)
+ local v = osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
+ if v == "" then
+ return nil
+ else
+ return v
+ end
+ end
+
+ else
+
+ local ossetenv = os.setenv
+ local osgetenv = os.getenv
+ local osenv = { }
+
+ function os.setenv(k,v)
+ if v == nil then
+ v = ""
+ end
+ local K = upper(k)
+ osenv[K] = v
+ end
+
+ function os.getenv(k)
+ local K = upper(k)
+ local v = osenv[K] or osgetenv(K) or osgetenv(k)
+ if v == "" then
+ return nil
+ else
+ return v
+ end
+ end
+
+ local function __index(t,k)
+ return os.getenv(k)
+ end
+ local function __newindex(t,k,v)
+ os.setenv(k,v)
+ end
+
+ os.env = { }
+
+ setmetatable(os.env, { __index = __index, __newindex = __newindex } )
+
+ end
+
+end
+
+-- end of environment hack
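+
+A usage sketch (not part of the patch) of what the wrapper buys us, assuming a luatex run where os.env is provided by the engine:
+
+    os.setenv("MyVariable","test")   -- stored (and exported) uppercased as MYVARIABLE
+    print(os.getenv("myvariable"))   -- test: lookups also go via upper()
+    for k, v in next, os.env do      -- the environment table can now be traversed
+        -- print(k,v)
+    end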
+
+local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush
function os.execute(...) ioflush() return execute(...) end
function os.spawn (...) ioflush() return spawn (...) end
function os.exec (...) ioflush() return exec (...) end
+function io.popen (...) ioflush() return iopopen(...) end
function os.resultof(command)
- ioflush() -- else messed up logging
local handle = io.popen(command,"r")
- if not handle then
- -- print("unknown command '".. command .. "' in os.resultof")
- return ""
- else
- return handle:read("*all") or ""
- end
+ return handle and handle:read("*all") or ""
end
---~ os.type : windows | unix (new, we already guessed os.platform)
---~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
---~ os.platform : extended os.name with architecture
if not io.fileseparator then
if find(os.getenv("PATH"),";") then
@@ -1856,11 +1891,6 @@ function os.runtime()
return os.gettimeofday() - startuptime
end
---~ print(os.gettimeofday()-os.time())
---~ os.sleep(1.234)
---~ print (">>",os.runtime())
---~ print(os.date("%H:%M:%S",os.gettimeofday()))
---~ print(os.date("%H:%M:%S",os.time()))
-- no need for function anymore as we have more clever code and helpers now
-- this metatable trickery might as well disappear
@@ -1878,24 +1908,6 @@ end
setmetatable(os,osmt)
-if not os.setenv then
-
- -- we still store them but they won't be seen in
- -- child processes although we might pass them some day
- -- using command concatination
-
- local env, getenv = { }, os.getenv
-
- function os.setenv(k,v)
- env[k] = v
- end
-
- function os.getenv(k)
- return env[k] or getenv(k)
- end
-
-end
-
-- we can use HOSTTYPE on some platforms
local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or ""
@@ -2016,7 +2028,7 @@ elseif name == "kfreebsd" then
-- we sometimes have HOSTTYPE set so let's check that first
local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
if find(architecture,"x86_64") then
- platform = "kfreebsd-64"
+ platform = "kfreebsd-amd64"
else
platform = "kfreebsd-i386"
end
@@ -2093,59 +2105,81 @@ if not modules then modules = { } end modules ['l-file'] = {
file = file or { }
-local concat = table.concat
+local insert, concat = table.insert, table.concat
local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char
local lpegmatch = lpeg.match
+local getcurrentdir = lfs.currentdir
-function file.removesuffix(filename)
- return (gsub(filename,"%.[%a%d]+$",""))
+local function dirname(name,default)
+ return match(name,"^(.+)[/\\].-$") or (default or "")
end
-function file.addsuffix(filename, suffix)
- if not suffix or suffix == "" then
- return filename
- elseif not find(filename,"%.[%a%d]+$") then
- return filename .. "." .. suffix
- else
- return filename
- end
+local function basename(name)
+ return match(name,"^.+[/\\](.-)$") or name
end
-function file.replacesuffix(filename, suffix)
- return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+local function nameonly(name)
+ return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
end
-function file.dirname(name,default)
- return match(name,"^(.+)[/\\].-$") or (default or "")
+local function extname(name,default)
+ return match(name,"^.+%.([^/\\]-)$") or default or ""
end
-function file.basename(name)
- return match(name,"^.+[/\\](.-)$") or name
+local function splitname(name)
+ local n, s = match(name,"^(.+)%.([^/\\]-)$")
+ return n or name, s or ""
end
-function file.nameonly(name)
- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
+file.basename = basename
+file.dirname = dirname
+file.nameonly = nameonly
+file.extname = extname
+file.suffix = extname
+
+function file.removesuffix(filename)
+ return (gsub(filename,"%.[%a%d]+$",""))
end
-function file.extname(name,default)
- return match(name,"^.+%.([^/\\]-)$") or default or ""
+function file.addsuffix(filename, suffix, criterium)
+ if not suffix or suffix == "" then
+ return filename
+ elseif criterium == true then
+ return filename .. "." .. suffix
+ elseif not criterium then
+ local n, s = splitname(filename)
+ if not s or s == "" then
+ return filename .. "." .. suffix
+ else
+ return filename
+ end
+ else
+ local n, s = splitname(filename)
+ if s and s ~= "" then
+ local t = type(criterium)
+ if t == "table" then
+ -- keep if in criterium
+ for i=1,#criterium do
+ if s == criterium[i] then
+ return filename
+ end
+ end
+ elseif t == "string" then
+ -- keep if criterium
+ if s == criterium then
+ return filename
+ end
+ end
+ end
+ return n .. "." .. suffix
+ end
end
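+
+A usage sketch (not part of the patch) of the new criterium argument; the filenames are made up:
+
+    file.addsuffix("name","tex")                      -- name.tex
+    file.addsuffix("name.tex","pdf")                  -- name.tex     (already has a suffix)
+    file.addsuffix("name.tex","pdf",true)             -- name.tex.pdf (forced)
+    file.addsuffix("name.xml","tex","mkiv")           -- name.tex     (xml is not the suffix to keep)
+    file.addsuffix("name.mkiv","tex",{"mkiv","mkii"}) -- name.mkiv    (suffix is in the keep list)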
-file.suffix = file.extname
---~ function file.join(...)
---~ local pth = concat({...},"/")
---~ pth = gsub(pth,"\\","/")
---~ local a, b = match(pth,"^(.*://)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ a, b = match(pth,"^(//)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ return (gsub(pth,"//+","/"))
---~ end
+function file.replacesuffix(filename, suffix)
+ return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+end
+
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
@@ -2173,18 +2207,9 @@ function file.join(...)
return (gsub(pth,"//+","/"))
end
---~ print(file.join("//","/y"))
---~ print(file.join("/","/y"))
---~ print(file.join("","/y"))
---~ print(file.join("/x/","/y"))
---~ print(file.join("x/","/y"))
---~ print(file.join("http://","/y"))
---~ print(file.join("http://a","/y"))
---~ print(file.join("http:///a","/y"))
---~ print(file.join("//nas-1","/y"))
function file.iswritable(name)
- local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,"."))
+ local a = lfs.attributes(name) or lfs.attributes(dirname(name,"."))
return a and sub(a.permissions,2,2) == "w"
end
@@ -2198,17 +2223,6 @@ file.is_writable = file.iswritable
-- todo: lpeg
---~ function file.split_path(str)
---~ local t = { }
---~ str = gsub(str,"\\", "/")
---~ str = gsub(str,"(%a):([;/])", "%1\001%2")
---~ for name in gmatch(str,"([^;:]+)") do
---~ if name ~= "" then
---~ t[#t+1] = gsub(name,"\001",":")
---~ end
---~ end
---~ return t
---~ end
local checkedsplit = string.checkedsplit
@@ -2223,31 +2237,62 @@ end
-- we can hash them weakly
-function file.collapse_path(str)
+
+function file.collapse_path(str,anchor)
+ if anchor and not find(str,"^/") and not find(str,"^%a:") then
+ str = getcurrentdir() .. "/" .. str
+ end
+ if str == "" or str =="." then
+ return "."
+ elseif find(str,"^%.%.") then
+ str = gsub(str,"\\","/")
+ return str
+ elseif not find(str,"%.") then
+ str = gsub(str,"\\","/")
+ return str
+ end
str = gsub(str,"\\","/")
- if find(str,"/") then
- str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified
- str = gsub(str,"/%./","/")
- local n, m = 1, 1
- while n > 0 or m > 0 do
- str, n = gsub(str,"[^/%.]+/%.%.$","")
- str, m = gsub(str,"[^/%.]+/%.%./","")
+ local starter, rest = match(str,"^(%a+:/*)(.-)$")
+ if starter then
+ str = rest
+ end
+ local oldelements = checkedsplit(str,"/")
+ local newelements = { }
+ local i = #oldelements
+ while i > 0 do
+ local element = oldelements[i]
+ if element == '.' then
+ -- do nothing
+ elseif element == '..' then
+ local n = i -1
+ while n > 0 do
+ local element = oldelements[n]
+ if element ~= '..' and element ~= '.' then
+ oldelements[n] = '.'
+ break
+ else
+ n = n - 1
+ end
+ end
+ if n < 1 then
+ insert(newelements,1,'..')
+ end
+ elseif element ~= "" then
+ insert(newelements,1,element)
end
- str = gsub(str,"([^/])/$","%1")
- -- str = gsub(str,"^%./","") -- ./xx in qualified
- str = gsub(str,"/%.$","")
+ i = i - 1
+ end
+ if #newelements == 0 then
+ return starter or "."
+ elseif starter then
+ return starter .. concat(newelements, '/')
+ elseif find(str,"^/") then
+ return "/" .. concat(newelements,'/')
+ else
+ return concat(newelements, '/')
end
- if str == "" then str = "." end
- return str
end
---~ print(file.collapse_path("/a"))
---~ print(file.collapse_path("a/./b/.."))
---~ print(file.collapse_path("a/aa/../b/bb"))
---~ print(file.collapse_path("a/../.."))
---~ print(file.collapse_path("a/.././././b/.."))
---~ print(file.collapse_path("a/./././b/.."))
---~ print(file.collapse_path("a/b/c/../.."))
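+
+A usage sketch (not part of the patch): the element based rewrite gives the same results as the test calls removed above, plus optional anchoring:
+
+    print(file.collapse_path("a/./b/.."))      -- a
+    print(file.collapse_path("a/aa/../b/bb"))  -- a/b/bb
+    print(file.collapse_path("a/b/c/../.."))   -- a
+    print(file.collapse_path("x/y.tex",true))  -- relative path prefixed with lfs.currentdir() first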
function file.robustname(str)
return (gsub(str,"[^%a%d%/%-%.\\]+","-"))
@@ -2262,92 +2307,23 @@ end
-- lpeg variants, slightly faster, not always
---~ local period = lpeg.P(".")
---~ local slashes = lpeg.S("\\/")
---~ local noperiod = 1-period
---~ local noslashes = 1-slashes
---~ local name = noperiod^1
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1
-
---~ function file.extname(name)
---~ return lpegmatch(pattern,name) or ""
---~ end
-
---~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1)
-
---~ function file.removesuffix(name)
---~ return lpegmatch(pattern,name)
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1
-
---~ function file.basename(name)
---~ return lpegmatch(pattern,name) or name
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1
-
---~ function file.dirname(name)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2)
---~ else
---~ return ""
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
-
---~ function file.addsuffix(name, suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return name
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
-
---~ function file.replacesuffix(name,suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2) .. "." .. suffix
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1
-
---~ function file.nameonly(name)
---~ local a, b = lpegmatch(pattern,name)
---~ if b then
---~ return sub(name,a,b-2)
---~ elseif a then
---~ return sub(name,a)
---~ else
---~ return name
---~ end
---~ end
-
---~ local test = file.extname
---~ local test = file.basename
---~ local test = file.dirname
---~ local test = file.addsuffix
---~ local test = file.replacesuffix
---~ local test = file.nameonly
-
---~ print(1,test("./a/b/c/abd.def.xxx","!!!"))
---~ print(2,test("./../b/c/abd.def.xxx","!!!"))
---~ print(3,test("a/b/c/abd.def.xxx","!!!"))
---~ print(4,test("a/b/c/def.xxx","!!!"))
---~ print(5,test("a/b/c/def","!!!"))
---~ print(6,test("def","!!!"))
---~ print(7,test("def.xxx","!!!"))
-
---~ local tim = os.clock() for i=1,250000 do local ext = test("abd.def.xxx","!!!") end print(os.clock()-tim)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-- also rewrite previous
@@ -2387,14 +2363,6 @@ end
-- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
-- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
---~ -- todo:
---~
---~ if os.type == "windows" then
---~ local currentdir = lfs.currentdir
---~ function lfs.currentdir()
---~ return (gsub(currentdir(),"\\","/"))
---~ end
---~ end
end -- of closure
@@ -2420,18 +2388,6 @@ if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
---~ if not md5.HEX then
---~ local function remap(chr) return format("%02X",byte(chr)) end
---~ function md5.HEX(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
---~ if not md5.hex then
---~ local function remap(chr) return format("%02x",byte(chr)) end
---~ function md5.hex(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
---~ if not md5.dec then
---~ local function remap(chr) return format("%03i",byte(chr)) end
---~ function md5.dec(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
file.needs_updating_threshold = 1
@@ -2487,9 +2443,10 @@ if not modules then modules = { } end modules ['l-url'] = {
license = "see context related readme files"
}
-local char, gmatch, gsub = string.char, string.gmatch, string.gsub
+local char, gmatch, gsub, format, byte = string.char, string.gmatch, string.gsub, string.format, string.byte
+local concat = table.concat
local tonumber, type = tonumber, type
-local lpegmatch = lpeg.match
+local lpegmatch, lpegP, lpegC, lpegR, lpegS, lpegCs, lpegCc = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc
-- from the spec (on the web):
--
@@ -2507,22 +2464,35 @@ local function tochar(s)
return char(tonumber(s,16))
end
-local colon, qmark, hash, slash, percent, endofstring = lpeg.P(":"), lpeg.P("?"), lpeg.P("#"), lpeg.P("/"), lpeg.P("%"), lpeg.P(-1)
+local colon, qmark, hash, slash, percent, endofstring = lpegP(":"), lpegP("?"), lpegP("#"), lpegP("/"), lpegP("%"), lpegP(-1)
-local hexdigit = lpeg.R("09","AF","af")
-local plus = lpeg.P("+")
-local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar)
+local hexdigit = lpegR("09","AF","af")
+local plus = lpegP("+")
+local nothing = lpegCc("")
+local escaped = (plus / " ") + (percent * lpegC(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("")
-local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("")
-local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("")
-local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("")
-local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0) + lpeg.Cc("")
+local scheme = lpegCs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
+local authority = slash * slash * lpegCs((escaped+(1- slash-qmark-hash))^0) + nothing
+local path = slash * lpegCs((escaped+(1- qmark-hash))^0) + nothing
+local query = qmark * lpegCs((escaped+(1- hash))^0) + nothing
+local fragment = hash * lpegCs((escaped+(1- endofstring))^0) + nothing
local parser = lpeg.Ct(scheme * authority * path * query * fragment)
+lpeg.patterns.urlsplitter = parser
+
+local escapes = { }
+
+for i=0,255 do
+ escapes[i] = format("%%%02X",i)
+end
+
+local escaper = lpeg.Cs((lpegR("09","AZ","az") + lpegS("-./_") + lpegP(1) / escapes)^0)
+
+lpeg.patterns.urlescaper = escaper
+
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
@@ -2535,15 +2505,27 @@ end
function url.hashed(str)
local s = url.split(str)
local somescheme = s[1] ~= ""
- return {
- scheme = (somescheme and s[1]) or "file",
- authority = s[2],
- path = s[3],
- query = s[4],
- fragment = s[5],
- original = str,
- noscheme = not somescheme,
- }
+ if not somescheme then
+ return {
+ scheme = "file",
+ authority = "",
+ path = str,
+ query = "",
+ fragment = "",
+ original = str,
+ noscheme = true,
+ }
+ else
+ return {
+ scheme = s[1],
+ authority = s[2],
+ path = s[3],
+ query = s[4],
+ fragment = s[5],
+ original = str,
+ noscheme = false,
+ }
+ end
end
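+
+A usage sketch (not part of the patch) showing the two branches of the rewritten hasher, with made up input:
+
+    local h = url.hashed("http://www.pragma-ade.com/oeps.pdf?what=a#b")
+    -- h.scheme == "http", h.authority == "www.pragma-ade.com", h.path == "oeps.pdf",
+    -- h.query == "what=a", h.fragment == "b", h.noscheme == false
+    local f = url.hashed("/etc/passwd")
+    -- f.scheme == "file", f.path == "/etc/passwd" (kept verbatim), f.noscheme == true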
function url.hasscheme(str)
@@ -2554,15 +2536,25 @@ function url.addscheme(str,scheme)
return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str)
end
-function url.construct(hash)
- local fullurl = hash.sheme .. "://".. hash.authority .. hash.path
- if hash.query then
- fullurl = fullurl .. "?".. hash.query
+function url.construct(hash) -- todo: we need to escape !
+ local fullurl = { }
+ local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
+ if scheme and scheme ~= "" then
+ fullurl[#fullurl+1] = scheme .. "://"
+ end
+ if authority and authority ~= "" then
+ fullurl[#fullurl+1] = authority
end
- if hash.fragment then
- fullurl = fullurl .. "?".. hash.fragment
+ if path and path ~= "" then
+ fullurl[#fullurl+1] = "/" .. path
end
- return fullurl
+ if query and query ~= "" then
+ fullurl[#fullurl+1] = "?".. query
+ end
+ if fragment and fragment ~= "" then
+ fullurl[#fullurl+1] = "#".. fragment
+ end
+ return lpegmatch(escaper,concat(fullurl))
end
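+
+A usage sketch (not part of the patch) of the rewritten constructor; the field values are made up:
+
+    local u = url.construct {
+        scheme    = "http",
+        authority = "www.pragma-ade.com",
+        path      = "general/magazines.html",
+        fragment  = "top",
+    }
+    -- the pieces are joined as scheme://authority/path?query#fragment (empty ones skipped)
+    -- and the result is passed through lpeg.patterns.urlescaper before being returned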
function url.filename(filename)
@@ -2582,37 +2574,12 @@ function url.query(str)
end
end
---~ print(url.filename("file:///c:/oeps.txt"))
---~ print(url.filename("c:/oeps.txt"))
---~ print(url.filename("file:///oeps.txt"))
---~ print(url.filename("file:///etc/test.txt"))
---~ print(url.filename("/oeps.txt"))
-
---~ from the spec on the web (sort of):
---~
---~ function test(str)
---~ print(table.serialize(url.hashed(str)))
---~ end
---~
---~ test("%56pass%20words")
---~ test("file:///c:/oeps.txt")
---~ test("file:///c|/oeps.txt")
---~ test("file:///etc/oeps.txt")
---~ test("file://./etc/oeps.txt")
---~ test("file:////etc/oeps.txt")
---~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt")
---~ test("http://www.ietf.org/rfc/rfc2396.txt")
---~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what")
---~ test("mailto:John.Doe@example.com")
---~ test("news:comp.infosystems.www.servers.unix")
---~ test("tel:+1-816-555-1212")
---~ test("telnet://192.0.2.16:80/")
---~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2")
---~ test("/etc/passwords")
---~ test("http://www.pragma-ade.com/spaced%20name")
-
---~ test("zip:///oeps/oeps.zip#bla/bla.tex")
---~ test("zip:///oeps/oeps.zip?bla/bla.tex")
+
+
+
+
+
+
end -- of closure
@@ -2767,11 +2734,6 @@ end
dir.glob = glob
---~ list = dir.glob("**/*.tif")
---~ list = dir.glob("/**/*.tif")
---~ list = dir.glob("./**/*.tif")
---~ list = dir.glob("oeps/**/*.tif")
---~ list = dir.glob("/oeps/**/*.tif")
local function globfiles(path,recurse,func,files) -- func == pattern or function
if type(func) == "string" then
@@ -2815,10 +2777,6 @@ function dir.ls(pattern)
return table.concat(glob(pattern),"\n")
end
---~ mkdirs("temp")
---~ mkdirs("a/b/c")
---~ mkdirs(".","/a/b/c")
---~ mkdirs("a","b","c")
local make_indeed = true -- false
@@ -2878,17 +2836,6 @@ if string.find(os.getenv("PATH"),";") then -- os.type == "windows"
return pth, (lfs.isdir(pth) == true)
end
---~ print(dir.mkdirs("","","a","c"))
---~ print(dir.mkdirs("a"))
---~ print(dir.mkdirs("a:"))
---~ print(dir.mkdirs("a:/b/c"))
---~ print(dir.mkdirs("a:b/c"))
---~ print(dir.mkdirs("a:/bbb/c"))
---~ print(dir.mkdirs("/a/b/c"))
---~ print(dir.mkdirs("/aaa/b/c"))
---~ print(dir.mkdirs("//a/b/c"))
---~ print(dir.mkdirs("///a/b/c"))
---~ print(dir.mkdirs("a/bbb//ccc/"))
function dir.expand_name(str) -- will be merged with cleanpath and collapsepath
local first, nothing, last = match(str,"^(//)(//*)(.*)$")
@@ -2928,7 +2875,7 @@ else
local str, pth, t = "", "", { ... }
for i=1,#t do
local s = t[i]
- if s ~= "" then
+ if s and s ~= "" then -- we catch nil and false
if str ~= "" then
str = str .. "/" .. s
else
@@ -2962,13 +2909,6 @@ else
return pth, (lfs.isdir(pth) == true)
end
---~ print(dir.mkdirs("","","a","c"))
---~ print(dir.mkdirs("a"))
---~ print(dir.mkdirs("/a/b/c"))
---~ print(dir.mkdirs("/aaa/b/c"))
---~ print(dir.mkdirs("//a/b/c"))
---~ print(dir.mkdirs("///a/b/c"))
---~ print(dir.mkdirs("a/bbb//ccc/"))
function dir.expand_name(str) -- will be merged with cleanpath and collapsepath
if not find(str,"^/") then
@@ -3025,7 +2965,7 @@ function toboolean(str,tolerant)
end
end
-function string.is_boolean(str)
+function string.is_boolean(str,default)
if type(str) == "string" then
if str == "true" or str == "yes" or str == "on" or str == "t" then
return true
@@ -3033,7 +2973,7 @@ function string.is_boolean(str)
return false
end
end
- return nil
+ return default
end
function boolean.alwaystrue()
@@ -3049,6 +2989,211 @@ end -- of closure
do -- create closure to overcome 200 locals limit
+if not modules then modules = { } end modules ['l-unicode'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not unicode then
+
+ unicode = { utf8 = { } }
+
+ local floor, char = math.floor, string.char
+
+ function unicode.utf8.utfchar(n)
+ if n < 0x80 then
+ return char(n)
+ elseif n < 0x800 then
+ return char(0xC0 + floor(n/0x40)) .. char(0x80 + (n % 0x40))
+ elseif n < 0x10000 then
+ return char(0xE0 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40))
+ elseif n < 0x40000 then
+ return char(0xF0 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40))
+ else -- wrong:
+ -- return char(0xF1 + floor(n/0x1000000)) .. char(0x80 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40))
+ return "?"
+ end
+ end
+
+end
+
+utf = utf or unicode.utf8
+
+local concat, utfchar, utfgsub = table.concat, utf.char, utf.gsub
+local char, byte, find, bytepairs = string.char, string.byte, string.find, string.bytepairs
+
+-- 0 EF BB BF UTF-8
+-- 1 FF FE UTF-16-little-endian
+-- 2 FE FF UTF-16-big-endian
+-- 3 FF FE 00 00 UTF-32-little-endian
+-- 4 00 00 FE FF UTF-32-big-endian
+
+unicode.utfname = {
+ [0] = 'utf-8',
+ [1] = 'utf-16-le',
+ [2] = 'utf-16-be',
+ [3] = 'utf-32-le',
+ [4] = 'utf-32-be'
+}
+
+-- \000 fails in <= 5.0 but is valid in >=5.1 where %z is deprecated
+
+function unicode.utftype(f)
+ local str = f:read(4)
+ if not str then
+ f:seek('set')
+ return 0
+ -- elseif find(str,"^%z%z\254\255") then -- deprecated
+ -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged
+ elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH)
+ return 4
+ -- elseif find(str,"^\255\254%z%z") then -- deprecated
+ -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged
+ elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH)
+ return 3
+ elseif find(str,"^\254\255") then
+ f:seek('set',2)
+ return 2
+ elseif find(str,"^\255\254") then
+ f:seek('set',2)
+ return 1
+ elseif find(str,"^\239\187\191") then
+ f:seek('set',3)
+ return 0
+ else
+ f:seek('set')
+ return 0
+ end
+end
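+
+A usage sketch (not part of the patch) of how the sniffing and conversion might be combined; the filename is hypothetical and the endian flag follows from the code above (true means big endian):
+
+    local f = io.open("somefile.txt","rb")
+    if f then
+        local kind = unicode.utftype(f)          -- 0..4, see the table above
+        print(unicode.utfname[kind])             -- for instance utf-16-le
+        local data = f:read("*all")              -- remainder of the file (BOM, if any, skipped)
+        if kind == 1 or kind == 2 then
+            local lines = unicode.utf16_to_utf8(data,kind == 2)  -- table of utf-8 lines
+        end
+        f:close()
+    end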
+
+function unicode.utf16_to_utf8(str, endian) -- maybe a gsub is faster or an lpeg
+ local result, tmp, n, m, p = { }, { }, 0, 0, 0
+ -- lf | cr | crlf / (cr:13, lf:10)
+ local function doit()
+ if n == 10 then
+ if p ~= 13 then
+ result[#result+1] = concat(tmp)
+ tmp = { }
+ p = 0
+ end
+ elseif n == 13 then
+ result[#result+1] = concat(tmp)
+ tmp = { }
+ p = n
+ else
+ tmp[#tmp+1] = utfchar(n)
+ p = 0
+ end
+ end
+ for l,r in bytepairs(str) do
+ if r then
+ if endian then
+ n = l*256 + r
+ else
+ n = r*256 + l
+ end
+ if m > 0 then
+ n = (m-0xD800)*0x400 + (n-0xDC00) + 0x10000
+ m = 0
+ doit()
+ elseif n >= 0xD800 and n <= 0xDBFF then
+ m = n
+ else
+ doit()
+ end
+ end
+ end
+ if #tmp > 0 then
+ result[#result+1] = concat(tmp)
+ end
+ return result
+end
+
+function unicode.utf32_to_utf8(str, endian)
+ local result = { }
+ local tmp, n, m, p = { }, 0, -1, 0
+ -- lf | cr | crlf / (cr:13, lf:10)
+ local function doit()
+ if n == 10 then
+ if p ~= 13 then
+ result[#result+1] = concat(tmp)
+ tmp = { }
+ p = 0
+ end
+ elseif n == 13 then
+ result[#result+1] = concat(tmp)
+ tmp = { }
+ p = n
+ else
+ tmp[#tmp+1] = utfchar(n)
+ p = 0
+ end
+ end
+ for a,b in bytepairs(str) do
+ if a and b then
+ if m < 0 then
+ if endian then
+ m = a*256*256*256 + b*256*256
+ else
+ m = b*256 + a
+ end
+ else
+ if endian then
+ n = m + a*256 + b
+ else
+ n = m + b*256*256*256 + a*256*256
+ end
+ m = -1
+ doit()
+ end
+ else
+ break
+ end
+ end
+ if #tmp > 0 then
+ result[#result+1] = concat(tmp)
+ end
+ return result
+end
+
+local function little(c)
+ local b = byte(c) -- b = c:byte()
+ if b < 0x10000 then
+ return char(b%256,b/256)
+ else
+ b = b - 0x10000
+ local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
+ return char(b1%256,b1/256,b2%256,b2/256)
+ end
+end
+
+local function big(c)
+ local b = byte(c)
+ if b < 0x10000 then
+ return char(b/256,b%256)
+ else
+ b = b - 0x10000
+ local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
+ return char(b1/256,b1%256,b2/256,b2%256)
+ end
+end
+
+function unicode.utf8_to_utf16(str,littleendian)
+ if littleendian then
+ return char(255,254) .. utfgsub(str,".",little)
+ else
+ return char(254,255) .. utfgsub(str,".",big)
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
if not modules then modules = { } end modules ['l-math'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -3106,7 +3251,7 @@ if not modules then modules = { } end modules ['l-utils'] = {
-- hm, quite unreadable
-local gsub = string.gsub
+local gsub, format = string.gsub, string.format
local concat = table.concat
local type, next = type, next
@@ -3114,81 +3259,79 @@ if not utils then utils = { } end
if not utils.merger then utils.merger = { } end
if not utils.lua then utils.lua = { } end
-utils.merger.m_begin = "begin library merge"
-utils.merger.m_end = "end library merge"
-utils.merger.pattern =
+utils.report = utils.report or print
+
+local merger = utils.merger
+
+merger.strip_comment = true
+
+local m_begin_merge = "begin library merge"
+local m_end_merge = "end library merge"
+local m_begin_closure = "do -- create closure to overcome 200 locals limit"
+local m_end_closure = "end -- of closure"
+
+local m_pattern =
"%c+" ..
- "%-%-%s+" .. utils.merger.m_begin ..
+ "%-%-%s+" .. m_begin_merge ..
"%c+(.-)%c+" ..
- "%-%-%s+" .. utils.merger.m_end ..
+ "%-%-%s+" .. m_end_merge ..
"%c+"
-function utils.merger._self_fake_()
- return
- "-- " .. "created merged file" .. "\n\n" ..
- "-- " .. utils.merger.m_begin .. "\n\n" ..
- "-- " .. utils.merger.m_end .. "\n\n"
-end
+local m_format =
+ "\n\n-- " .. m_begin_merge ..
+ "\n%s\n" ..
+ "-- " .. m_end_merge .. "\n\n"
-function utils.report(...)
- print(...)
+local m_faked =
+ "-- " .. "created merged file" .. "\n\n" ..
+ "-- " .. m_begin_merge .. "\n\n" ..
+ "-- " .. m_end_merge .. "\n\n"
+
+local function self_fake()
+ return m_faked
end
-utils.merger.strip_comment = true
+local function self_nothing()
+ return ""
+end
-function utils.merger._self_load_(name)
- local f, data = io.open(name), ""
- if f then
- utils.report("reading merge from %s",name)
- data = f:read("*all")
- f:close()
+local function self_load(name)
+ local data = io.loaddata(name) or ""
+ if data == "" then
+ utils.report("merge: unknown file %s",name)
else
- utils.report("unknown file to merge %s",name)
- end
- if data and utils.merger.strip_comment then
- -- saves some 20K
- data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "")
+ utils.report("merge: inserting %s",name)
end
return data or ""
end
-function utils.merger._self_save_(name, data)
+local function self_save(name, data)
if data ~= "" then
- local f = io.open(name,'w')
- if f then
- utils.report("saving merge from %s",name)
- f:write(data)
- f:close()
+ if merger.strip_comment then
+ -- saves some 20K
+ local n = #data
+ data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
+ utils.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data)
end
+ io.savedata(name,data)
+ utils.report("merge: saving %s",name)
end
end
-function utils.merger._self_swap_(data,code)
- if data ~= "" then
- return (gsub(data,utils.merger.pattern, function(s)
- return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end .. "\n\n"
- end, 1))
- else
- return ""
- end
+local function self_swap(data,code)
+ return data ~= "" and (gsub(data,m_pattern, function() return format(m_format,code) end, 1)) or ""
end
---~ stripper:
---~
---~ data = gsub(data,"%-%-~[^\n]*\n","")
---~ data = gsub(data,"\n\n+","\n")
-
-function utils.merger._self_libs_(libs,list)
- local result, f, frozen = { }, nil, false
+local function self_libs(libs,list)
+ local result, f, frozen, foundpath = { }, nil, false, nil
result[#result+1] = "\n"
if type(libs) == 'string' then libs = { libs } end
if type(list) == 'string' then list = { list } end
- local foundpath = nil
for i=1,#libs do
local lib = libs[i]
for j=1,#list do
local pth = gsub(list[j],"\\","/") -- file.clean_path
- utils.report("checking library path %s",pth)
+ utils.report("merge: checking library path %s",pth)
local name = pth .. "/" .. lib
if lfs.isfile(name) then
foundpath = pth
@@ -3197,76 +3340,58 @@ function utils.merger._self_libs_(libs,list)
if foundpath then break end
end
if foundpath then
- utils.report("using library path %s",foundpath)
+ utils.report("merge: using library path %s",foundpath)
local right, wrong = { }, { }
for i=1,#libs do
local lib = libs[i]
local fullname = foundpath .. "/" .. lib
if lfs.isfile(fullname) then
- -- right[#right+1] = lib
- utils.report("merging library %s",fullname)
- result[#result+1] = "do -- create closure to overcome 200 locals limit"
+ utils.report("merge: using library %s",fullname)
+ right[#right+1] = lib
+ result[#result+1] = m_begin_closure
result[#result+1] = io.loaddata(fullname,true)
- result[#result+1] = "end -- of closure"
+ result[#result+1] = m_end_closure
else
- -- wrong[#wrong+1] = lib
- utils.report("no library %s",fullname)
+ utils.report("merge: skipping library %s",fullname)
+ wrong[#wrong+1] = lib
end
end
if #right > 0 then
- utils.report("merged libraries: %s",concat(right," "))
+ utils.report("merge: used libraries: %s",concat(right," "))
end
if #wrong > 0 then
- utils.report("skipped libraries: %s",concat(wrong," "))
+ utils.report("merge: skipped libraries: %s",concat(wrong," "))
end
else
- utils.report("no valid library path found")
+ utils.report("merge: no valid library path found")
end
return concat(result, "\n\n")
end
-function utils.merger.selfcreate(libs,list,target)
+function merger.selfcreate(libs,list,target)
if target then
- utils.merger._self_save_(
- target,
- utils.merger._self_swap_(
- utils.merger._self_fake_(),
- utils.merger._self_libs_(libs,list)
- )
- )
- end
-end
-
-function utils.merger.selfmerge(name,libs,list,target)
- utils.merger._self_save_(
- target or name,
- utils.merger._self_swap_(
- utils.merger._self_load_(name),
- utils.merger._self_libs_(libs,list)
- )
- )
+ self_save(target,self_swap(self_fake(),self_libs(libs,list)))
+ end
end
-function utils.merger.selfclean(name)
- utils.merger._self_save_(
- name,
- utils.merger._self_swap_(
- utils.merger._self_load_(name),
- ""
- )
- )
+function merger.selfmerge(name,libs,list,target)
+ self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
end
-function utils.lua.compile(luafile, lucfile, cleanup, strip) -- defaults: cleanup=false strip=true
- -- utils.report("compiling",luafile,"into",lucfile)
+function merger.selfclean(name)
+ self_save(name,self_swap(self_load(name),self_nothing()))
+end
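+
+A usage sketch (not part of the patch): the public entry points keep their signatures, so a self merge still looks like this (all names hypothetical):
+
+    utils.merger.selfmerge(
+        "mtxrun.lua",                        -- file containing the merge markers
+        { "l-string.lua", "l-table.lua" },   -- libraries to embed
+        { "tex/context/base" },              -- paths searched for those libraries
+        "mtxrun-merged.lua"                  -- optional target, defaults to the input
+    )
+    utils.merger.selfclean("mtxrun-merged.lua")  -- removes the embedded block again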
+
+function utils.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+ utils.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quote(lucfile) .. " " .. string.quote(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = (os.spawn("texluac " .. command) == 0) or (os.spawn("luac " .. command) == 0)
+ local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- -- utils.report("removing",luafile)
+ utils.report("lua: removing %s",luafile)
os.remove(luafile)
end
return done
@@ -3350,11 +3475,7 @@ end
function aux.settings_to_hash(str,existing)
if str and str ~= "" then
hash = existing or { }
- if moretolerant then
- lpegmatch(pattern_b_s,str)
- else
- lpegmatch(pattern_a_s,str)
- end
+ lpegmatch(pattern_a_s,str)
return hash
else
return { }
@@ -3484,12 +3605,6 @@ local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
local number = digit^1 * (case_1 + case_2)
local stripper = lpeg.Cs((number + 1)^0)
---~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100"
---~ collectgarbage("collect")
---~ str = string.rep(sample,10000)
---~ local ts = os.clock()
---~ lpegmatch(stripper,str)
---~ print(#str, os.clock()-ts, lpegmatch(stripper,sample))
lpeg.patterns.strip_zeros = stripper
@@ -3518,235 +3633,305 @@ function aux.accesstable(target)
return t
end
---~ function string.commaseparated(str)
---~ return gmatch(str,"([^,%s]+)")
---~ end
-- as we use this a lot ...
---~ function aux.cachefunction(action,weak)
---~ local cache = { }
---~ if weak then
---~ setmetatable(cache, { __mode = "kv" } )
---~ end
---~ local function reminder(str)
---~ local found = cache[str]
---~ if not found then
---~ found = action(str)
---~ cache[str] = found
---~ end
---~ return found
---~ end
---~ return reminder, cache
---~ end
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-tra'] = {
+if not modules then modules = { } end modules ['trac-inf'] = {
version = 1.001,
- comment = "companion to trac-tra.mkiv",
+ comment = "companion to trac-inf.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
--- the <anonymous> tag is kind of generic and used for functions that are not
--- bound to a variable, like node.new, node.copy etc (contrary to for instance
--- node.has_attribute which is bound to a has_attribute local variable in mkiv)
+-- As we want to protect the global tables, we no longer store the timing
+-- in the tables themselves but in a hidden timers table so that we don't
+-- get warnings about assignments. This is more efficient than using rawset
+-- and rawget.
-local debug = require "debug"
+local format = string.format
+local clock = os.gettimeofday or os.clock -- should go in environment
-local getinfo = debug.getinfo
-local type, next = type, next
-local concat = table.concat
-local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub
+local statusinfo, n, registered = { }, 0, { }
-debugger = debugger or { }
+statistics = statistics or { }
-local counters = { }
-local names = { }
+statistics.enable = true
+statistics.threshold = 0.05
--- one
+local timers = { }
-local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
- if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
+local function hastiming(instance)
+ return instance and timers[instance]
end
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
- else
- -- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+
+local function resettiming(instance)
+ timers[instance or "notimer"] = { timing = 0, loadtime = 0 }
+end
+
+local function starttiming(instance)
+ local timer = timers[instance or "notimer"]
+ if not timer then
+ timer = { }
+ timers[instance or "notimer"] = timer
+ end
+ local it = timer.timing
+ if not it then
+ it = 0
+ end
+ if it == 0 then
+ timer.starttime = clock()
+ if not timer.loadtime then
+ timer.loadtime = 0
end
- else
- return "unknown"
end
+ timer.timing = it + 1
end
-function debugger.showstats(printer,threshold)
- printer = printer or texio.write or print
- threshold = threshold or 0
- local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
+
+local function stoptiming(instance, report)
+ local timer = timers[instance or "notimer"]
+ local it = timer.timing
+ if it > 1 then
+ timer.timing = it - 1
+ else
+ local starttime = timer.starttime
+ if starttime then
+ local stoptime = clock()
+ local loadtime = stoptime - starttime
+ timer.stoptime = stoptime
+ timer.loadtime = timer.loadtime + loadtime
+ if report then
+ statistics.report("load time %0.3f",loadtime)
end
+ timer.timing = 0
+ return loadtime
end
- grandtotal = grandtotal + count
- functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ return 0
end
--- two
-
---~ local function hook()
---~ local n = getinfo(2)
---~ if n.what=="C" and not n.name then
---~ local f = tostring(debug.traceback())
---~ local cf = counters[f]
---~ if cf == nil then
---~ counters[f] = 1
---~ names[f] = n
---~ else
---~ counters[f] = cf + 1
---~ end
---~ end
---~ end
---~ function debugger.showstats(printer,threshold)
---~ printer = printer or texio.write or print
---~ threshold = threshold or 0
---~ local total, grandtotal, functions = 0, 0, 0
---~ printer("\n") -- ugly but ok
---~ -- table.sort(counters)
---~ for func, count in next, counters do
---~ if count > threshold then
---~ printer(format("%8i %s", count, func))
---~ total = total + count
---~ end
---~ grandtotal = grandtotal + count
---~ functions = functions + 1
---~ end
---~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
---~ end
+local function elapsedtime(instance)
+ local timer = timers[instance or "notimer"]
+ return format("%0.3f",timer and timer.loadtime or 0)
+end
--- rest
+local function elapsedindeed(instance)
+ local timer = timers[instance or "notimer"]
+ return (timer and timer.loadtime or 0) > statistics.threshold
+end
-function debugger.savestats(filename,threshold)
- local f = io.open(filename,'w')
- if f then
- debugger.showstats(function(str) f:write(str) end,threshold)
- f:close()
+local function elapsedseconds(instance,rest) -- returns nil if 0 seconds
+ if elapsedindeed(instance) then
+ return format("%s seconds %s", elapsedtime(instance),rest or "")
end
end
-function debugger.enable()
- debug.sethook(hook,"c")
+statistics.hastiming = hastiming
+statistics.resettiming = resettiming
+statistics.starttiming = starttiming
+statistics.stoptiming = stoptiming
+statistics.elapsedtime = elapsedtime
+statistics.elapsedindeed = elapsedindeed
+statistics.elapsedseconds = elapsedseconds
+
+-- general function
+
+function statistics.register(tag,fnc)
+ if statistics.enable and type(fnc) == "function" then
+ local rt = registered[tag] or (#statusinfo + 1)
+ statusinfo[rt] = { tag, fnc }
+ registered[tag] = rt
+ if #tag > n then n = #tag end
+ end
end
-function debugger.disable()
- debug.sethook()
---~ counters[debug.getinfo(2,"f").func] = nil
+function statistics.show(reporter)
+ if statistics.enable then
+ if not reporter then reporter = function(tag,data,n) texio.write_nl(tag .. " " .. data) end end
+ -- this code will move
+ local register = statistics.register
+ register("luatex banner", function()
+ return string.lower(status.banner)
+ end)
+ register("control sequences", function()
+ return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ end)
+ register("callbacks", function()
+ local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
+ return format("direct: %s, indirect: %s, total: %s", total-indirect, indirect, total)
+ end)
+ register("current memory usage", statistics.memused)
+ register("runtime",statistics.runtime)
+ for i=1,#statusinfo do
+ local s = statusinfo[i]
+ local r = s[2]()
+ if r then
+ reporter(s[1],r,n)
+ end
+ end
+ texio.write_nl("") -- final newline
+ statistics.enable = false
+ end
end
-function debugger.tracing()
- local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0
- if n > 0 then
- function debugger.tracing() return true end ; return true
+function statistics.show_job_stat(tag,data,n)
+ if type(data) == "table" then
+ for i=1,#data do
+ statistics.show_job_stat(tag,data[i],n)
+ end
else
- function debugger.tracing() return false end ; return false
+ texio.write_nl(format("%-15s: %s - %s","mkiv lua stats",tag:rpadd(n," "),data))
end
end
---~ debugger.enable()
+function statistics.memused() -- no math.round yet -)
+ local round = math.round or math.floor
+ return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
+end
+
+starttiming(statistics)
+
+function statistics.formatruntime(runtime) -- indirect so it can be overloaded and
+ return format("%s seconds", runtime) -- indeed that happens in core-uti.lua
+end
+
+function statistics.runtime()
+ stoptiming(statistics)
+ return statistics.formatruntime(elapsedtime(statistics))
+end
+
+function statistics.timed(action,report)
+ report = report or logs.simple
+ starttiming("run")
+ action()
+ stoptiming("run")
+ report("total runtime: %s",elapsedtime("run"))
+end
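+
+A usage sketch (not part of the patch) of the timer helpers, with a made up instance name:
+
+    statistics.starttiming("myjob")
+    -- ... work ...
+    statistics.stoptiming("myjob")
+    print(statistics.elapsedtime("myjob"))     -- for instance 0.123
+    print(statistics.elapsedseconds("myjob"))  -- "0.123 seconds" or nil when below statistics.threshold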
+
+-- where, not really the best spot for this:
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
+commands = commands or { }
+
+function commands.resettimer(name)
+ resettiming(name or "whatever")
+ starttiming(name or "whatever")
+end
---~ debugger.disable()
+function commands.elapsedtime(name)
+ stoptiming(name or "whatever")
+ tex.sprint(elapsedtime(name or "whatever"))
+end
---~ print("")
---~ debugger.showstats()
---~ print("")
---~ debugger.showstats(print,3)
-setters = setters or { }
-setters.data = setters.data or { }
+end -- of closure
---~ local function set(t,what,value)
---~ local data, done = t.data, t.done
---~ if type(what) == "string" then
---~ what = aux.settings_to_array(what) -- inefficient but ok
---~ end
---~ for i=1,#what do
---~ local w = what[i]
---~ for d, f in next, data do
---~ if done[d] then
---~ -- prevent recursion due to wildcards
---~ elseif find(d,w) then
---~ done[d] = true
---~ for i=1,#f do
---~ f[i](value)
---~ end
---~ end
---~ end
---~ end
---~ end
+do -- create closure to overcome 200 locals limit
-local function set(t,what,value)
+if not modules then modules = { } end modules ['trac-set'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, next, tostring = type, next, tostring
+local concat = table.concat
+local format, find, lower, gsub = string.format, string.find, string.lower, string.gsub
+local is_boolean = string.is_boolean
+
+setters = { }
+
+local data = { } -- maybe just local
+
+-- We can initialize from the cnf file. This is sort of tricky as
+-- laster defined setters also need to be initialized then. If set
+-- this way, we need to ensure that they are not reset later on.
+
+local trace_initialize = false
+
+local function report(what,filename,name,key,value)
+ texio.write_nl(format("%s setter, filename: %s, name: %s, key: %s, value: %s",what,filename,name,key,value))
+end
+
+function setters.initialize(filename,name,values) -- filename only for diagnostics
+ local data = data[name]
+ if data then
+ data = data.data
+ if data then
+ for key, value in next, values do
+ key = gsub(key,"_",".")
+ value = is_boolean(value,value)
+ local functions = data[key]
+ if functions then
+ if #functions > 0 and not functions.value then
+ if trace_initialize then
+ report("doing",filename,name,key,value)
+ end
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value = value
+ else
+ if trace_initialize then
+ report("skipping",filename,name,key,value)
+ end
+ end
+ else
+ -- we do a simple preregistration i.e. not in the
+ -- list as it might be an obsolete entry
+ functions = { default = value }
+ data[key] = functions
+ if trace_initialize then
+ report("storing",filename,name,key,value)
+ end
+ end
+ end
+ end
+ end
+end
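+
+A usage sketch (not part of the patch) of a cnf driven initialization, assuming a setter group created with setters.new("directives"); the keys and values are made up:
+
+    setters.initialize("texmf.cnf","directives", {
+        system_nostatistics = "yes",    -- underscores become dots: system.nostatistics
+        resolvers_tracing   = "false",  -- is_boolean turns these strings into booleans
+    })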
+
+-- user interface code
+
+local function set(t,what,newvalue)
local data, done = t.data, t.done
if type(what) == "string" then
what = aux.settings_to_hash(what) -- inefficient but ok
end
- for w, v in next, what do
- if v == "" then
- v = value
+ for w, value in next, what do
+ if value == "" then
+ value = newvalue
+ elseif not value then
+ value = false -- catch nil
else
- v = toboolean(v)
+ value = is_boolean(value,value)
end
- for d, f in next, data do
- if done[d] then
+ for name, functions in next, data do
+ if done[name] then
-- prevent recursion due to wildcards
- elseif find(d,w) then
- done[d] = true
- for i=1,#f do
- f[i](v)
+ elseif find(name,w) then
+ done[name] = true
+ for i=1,#functions do
+ functions[i](value)
end
+ functions.value = value
end
end
end
end
local function reset(t)
- for d, f in next, t.data do
- for i=1,#f do
- f[i](false)
+ for name, functions in next, t.data do
+ for i=1,#functions do
+ functions[i](false)
end
+ functions.value = false
end
end
@@ -3767,17 +3952,26 @@ end
function setters.register(t,what,...)
local data = t.data
what = lower(what)
- local w = data[what]
- if not w then
- w = { }
- data[what] = w
+ local functions = data[what]
+ if not functions then
+ functions = { }
+ data[what] = functions
end
+ local default = functions.default -- can be set from cnf file
for _, fnc in next, { ... } do
local typ = type(fnc)
- if typ == "function" then
- w[#w+1] = fnc
- elseif typ == "string" then
- w[#w+1] = function(value) set(t,fnc,value,nesting) end
+ if typ == "string" then
+ local s = fnc -- else wrong reference
+ fnc = function(value) set(t,s,value) end
+ elseif typ ~= "function" then
+ fnc = nil
+ end
+ if fnc then
+ functions[#functions+1] = fnc
+ if default then
+ fnc(default)
+ functions.value = default
+ end
end
end
end
@@ -3818,8 +4012,16 @@ end
function setters.show(t)
commands.writestatus("","")
local list = setters.list(t)
+ local category = t.name
for k=1,#list do
- commands.writestatus(t.name,list[k])
+ local name = list[k]
+ local functions = t.data[name]
+ if functions then
+ local value, default, modules = functions.value, functions.default, #functions
+ value = value == nil and "unset" or tostring(value)
+ default = default == nil and "unset" or tostring(default)
+ commands.writestatus(category,format("%-25s modules: %2i default: %5s value: %5s",name,modules,default,value))
+ end
end
commands.writestatus("","")
end
@@ -3832,7 +4034,7 @@ end
function setters.new(name)
local t
t = {
- data = { },
+ data = { }, -- indexed, but also default and value fields
name = name,
enable = function(...) setters.enable (t,...) end,
disable = function(...) setters.disable (t,...) end,
@@ -3840,7 +4042,7 @@ function setters.new(name)
list = function(...) setters.list (t,...) end,
show = function(...) setters.show (t,...) end,
}
- setters.data[name] = t
+ data[name] = t
return t
end
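+
+-- Commented-out usage sketch of the setter interface above; the group and
+-- key names are invented, and enable/disable presumably feed true/false to
+-- the registered functions:
+--
+-- local demo = setters.new("demo")
+-- setters.register(demo,"whatever.option",function(v) print("option:",v) end)
+-- demo.enable ("whatever.option")
+-- demo.disable("whatever.option")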
@@ -3858,12 +4060,12 @@ local e = directives.enable
local d = directives.disable
function directives.enable(...)
- commands.writestatus("directives","enabling: %s",concat({...}," "))
+ (commands.writestatus or logs.report)("directives","enabling: %s",concat({...}," "))
e(...)
end
function directives.disable(...)
- commands.writestatus("directives","disabling: %s",concat({...}," "))
+ (commands.writestatus or logs.report)("directives","disabling: %s",concat({...}," "))
d(...)
end
@@ -3871,12 +4073,12 @@ local e = experiments.enable
local d = experiments.disable
function experiments.enable(...)
- commands.writestatus("experiments","enabling: %s",concat({...}," "))
+ (commands.writestatus or logs.report)("experiments","enabling: %s",concat({...}," "))
e(...)
end
function experiments.disable(...)
- commands.writestatus("experiments","disabling: %s",concat({...}," "))
+ (commands.writestatus or logs.report)("experiments","disabling: %s",concat({...}," "))
d(...)
end
@@ -3887,6 +4089,946 @@ directives.register("system.nostatistics", function(v)
end)
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-tra'] = {
+ version = 1.001,
+ comment = "companion to trac-tra.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- the <anonymous> tag is kind of generic and is used for functions that are not
+-- bound to a variable, like node.new, node.copy etc. (contrary to, for instance,
+-- node.has_attribute, which is bound to a has_attribute local variable in mkiv)
+
+local debug = require "debug"
+
+local getinfo = debug.getinfo
+local type, next = type, next
+local format, find = string.format, string.find
+local is_boolean = string.is_boolean
+
+debugger = debugger or { }
+
+local counters = { }
+local names = { }
+
+-- one
+
+local function hook()
+ local f = getinfo(2,"f").func
+ local n = getinfo(2,"Sn")
+-- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ if f then
+ local cf = counters[f]
+ if cf == nil then
+ counters[f] = 1
+ names[f] = n
+ else
+ counters[f] = cf + 1
+ end
+ end
+end
+
+local function getname(func)
+ local n = names[func]
+ if n then
+ if n.what == "C" then
+ return n.name or '<anonymous>'
+ else
+ -- source short_src linedefined what name namewhat nups func
+ local name = n.name or n.namewhat or n.what
+ if not name or name == "" then name = "?" end
+ return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ end
+ else
+ return "unknown"
+ end
+end
+
+function debugger.showstats(printer,threshold)
+ printer = printer or texio.write or print
+ threshold = threshold or 0
+ local total, grandtotal, functions = 0, 0, 0
+ printer("\n") -- ugly but ok
+ -- table.sort(counters)
+ for func, count in next, counters do
+ if count > threshold then
+ local name = getname(func)
+ if not find(name,"for generator") then
+ printer(format("%8i %s", count, name))
+ total = total + count
+ end
+ end
+ grandtotal = grandtotal + count
+ functions = functions + 1
+ end
+ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+end
+
+-- two
+
+
+-- rest
+
+function debugger.savestats(filename,threshold)
+ local f = io.open(filename,'w')
+ if f then
+ debugger.showstats(function(str) f:write(str) end,threshold)
+ f:close()
+ end
+end
+
+function debugger.enable()
+ debug.sethook(hook,"c")
+end
+
+function debugger.disable()
+ debug.sethook()
+end
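+
+-- Commented-out sketch: collect call counts around a piece of code and dump
+-- them, using the printer and threshold arguments defined in showstats above:
+--
+-- debugger.enable()
+-- ... run the code to be inspected ...
+-- debugger.disable()
+-- debugger.showstats(print,100)   -- only list functions called more than 100 times
+-- debugger.savestats("calls.log") -- or write the same report to a file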
+
+local function trace_calls(n)
+ debugger.enable()
+ luatex.register_stop_actions(function()
+ debugger.disable()
+ debugger.savestats(tex.jobname .. "-luacalls.log",tonumber(n))
+ end)
+ trace_calls = function() end
+end
+
+if directives then
+ directives.register("system.tracecalls", function(n) trace_calls(n) end) -- indirect is needed for nilling
+end
+
+
+
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-log'] = {
+ version = 1.001,
+ comment = "companion to trac-log.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- xml logging is only useful in normal runs, not in ini mode;
+-- it looks like some tex logging (like filenames) is broken (no longer
+-- intercepted at the tex end), so the xml variant is not that usable now
+
+
+local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
+local format, gmatch = string.format, string.gmatch
+local texcount = tex and tex.count
+
+--[[ldx--
+<p>This is a prelude to a more extensive logging module. For the sake
+of parsing log files, in addition to the standard logging we will
+provide an <l n='xml'/> structured file. Actually, any logging that
+is hooked into callbacks will be \XML\ by default.</p>
+--ldx]]--
+
+logs = logs or { }
+
+--[[ldx--
+<p>This looks pretty ugly but we need to speed things up a bit.</p>
+--ldx]]--
+
+local moreinfo = [[
+More information about ConTeXt and the tools that come with it can be found at:
+
+maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
+wiki : http://contextgarden.net
+]]
+
+local functions = {
+ 'report', 'status', 'start', 'stop', 'push', 'pop', 'line', 'direct',
+ 'start_run', 'stop_run',
+ 'start_page_number', 'stop_page_number',
+ 'report_output_pages', 'report_output_log',
+ 'report_tex_stat', 'report_job_stat',
+ 'show_open', 'show_close', 'show_load',
+ 'dummy',
+}
+
+local method = "nop"
+
+function logs.set_method(newmethod)
+ method = newmethod
+ -- a direct copy might be faster but let's try this for a while
+ setmetatable(logs, { __index = logs[method] })
+end
+
+function logs.get_method()
+ return method
+end
+
+-- installer
+
+local data = { }
+
+function logs.new(category)
+ local logger = data[category]
+ if not logger then
+ logger = function(...)
+ logs.report(category,...)
+ end
+ data[category] = logger
+ end
+ return logger
+end
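+
+-- Commented-out sketch: a category logger is just a closure around
+-- logs.report, so a module can do (the category name is an example):
+--
+-- local report_demo = logs.new("demo")
+-- report_demo("initializing")
+-- report_demo("loaded %s entries",123)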
+
+
+
+-- nop logging (maybe use __call instead)
+
+local noplog = { } logs.nop = noplog setmetatable(logs, { __index = noplog })
+
+for i=1,#functions do
+ noplog[functions[i]] = function() end
+end
+
+-- tex logging
+
+local texlog = { } logs.tex = texlog setmetatable(texlog, { __index = noplog })
+
+function texlog.report(a,b,c,...)
+ if c then
+ write_nl(format("%-16s> %s\n",a,format(b,c,...)))
+ elseif b then
+ write_nl(format("%-16s> %s\n",a,b))
+ else
+ write_nl(format("%-16s>\n",a))
+ end
+end
+
+function texlog.status(a,b,c,...)
+ if c then
+ write_nl(format("%-16s: %s\n",a,format(b,c,...)))
+ elseif b then
+ write_nl(format("%-16s: %s\n",a,b)) -- b can have %'s
+ else
+ write_nl(format("%-16s:>\n",a))
+ end
+end
+
+function texlog.line(fmt,...) -- new
+ if fmt then
+ write_nl(format(fmt,...))
+ else
+ write_nl("")
+ end
+end
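+
+-- For illustration only (not part of the module): with the tex backend the
+-- calls above produce a left-padded category followed by "> " or ": " and
+-- the formatted message, roughly:
+--
+-- texlog.report("fonts","loading %s","somefont") -- "fonts           > loading somefont"
+-- texlog.status("system","%s files",42)          -- "system          : 42 files"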
+
+local real, user, sub
+
+function texlog.start_page_number()
+ real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+end
+
+local report_pages = logs.new("pages") -- not needed but saves checking when we grep for it
+
+function texlog.stop_page_number()
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
+ io.flush()
+end
+
+texlog.report_job_stat = statistics and statistics.show_job_stat
+
+-- xml logging
+
+local xmllog = { } logs.xml = xmllog setmetatable(xmllog, { __index = noplog })
+
+function xmllog.report(category,fmt,s,...) -- new
+ if s then
+ write_nl(format("<r category='%s'>%s</r>",category,format(fmt,s,...)))
+ elseif fmt then
+ write_nl(format("<r category='%s'>%s</r>",category,fmt))
+ else
+ write_nl(format("<r category='%s'/>",category))
+ end
+end
+
+function xmllog.status(category,fmt,s,...)
+ if s then
+ write_nl(format("<s category='%s'>%s</r>",category,format(fmt,s,...)))
+ elseif fmt then
+ write_nl(format("<s category='%s'>%s</r>",category,fmt))
+ else
+ write_nl(format("<s category='%s'/>",category))
+ end
+end
+
+function xmllog.line(fmt,...) -- new
+ if fmt then
+ write_nl(format("<r>%s</r>",format(fmt,...)))
+ else
+ write_nl("<r/>")
+ end
+end
+
+function xmllog.start() write_nl("<%s>" ) end
+function xmllog.stop () write_nl("</%s>") end
+function xmllog.push () write_nl("<!-- ") end
+function xmllog.pop () write_nl(" -->" ) end
+
+function xmllog.start_run()
+ write_nl("<?xml version='1.0' standalone='yes'?>")
+ write_nl("<job>") -- xmlns='www.pragma-ade.com/luatex/schemas/context-job.rng'
+ write_nl("")
+end
+
+function xmllog.stop_run()
+ write_nl("</job>")
+end
+
+function xmllog.start_page_number()
+ write_nl(format("<p real='%s' page='%s' sub='%s'", texcount.realpageno, texcount.userpageno, texcount.subpageno))
+end
+
+function xmllog.stop_page_number()
+ write("/>")
+ write_nl("")
+end
+
+function xmllog.report_output_pages(p,b)
+ write_nl(format("<v k='pages' v='%s'/>", p))
+ write_nl(format("<v k='bytes' v='%s'/>", b))
+ write_nl("")
+end
+
+function xmllog.report_output_log()
+ -- nothing
+end
+
+function xmllog.report_tex_stat(k,v)
+ write_nl("log","<v k='"..k.."'>"..tostring(v).."</v>")
+end
+
+local nesting = 0
+
+function xmllog.show_open(name)
+ nesting = nesting + 1
+ write_nl(format("<f l='%s' n='%s'>",nesting,name))
+end
+
+function xmllog.show_close(name)
+ write("</f> ")
+ nesting = nesting - 1
+end
+
+function xmllog.show_load(name)
+ write_nl(format("<f l='%s' n='%s'/>",nesting+1,name))
+end
+
+-- initialization
+
+if tex and (tex.jobname or tex.formatname) then
+ -- todo: this can be set in mtxrun ... or maybe we should just forget about this alternative format
+ if (os.getenv("mtx.directives.logmethod") or os.getenv("mtx_directives_logmethod")) == "xml" then
+ logs.set_method('xml')
+ else
+ logs.set_method('tex')
+ end
+else
+ logs.set_method('nop')
+end
+
+-- logging in runners -> these are actually the nop loggers
+
+local name, banner = 'report', 'context'
+
+function noplog.report(category,fmt,...) -- todo: fmt,s
+ if fmt then
+ write_nl(format("%s | %s: %s",name,category,format(fmt,...)))
+ elseif category then
+ write_nl(format("%s | %s",name,category))
+ else
+ write_nl(format("%s |",name))
+ end
+end
+
+noplog.status = noplog.report -- just to be sure, never used
+
+function noplog.simple(fmt,...) -- todo: fmt,s
+ if fmt then
+ write_nl(format("%s | %s",name,format(fmt,...)))
+ else
+ write_nl(format("%s |",name))
+ end
+end
+
+if utils then
+ utils.report = function(...) logs.simple(...) end
+end
+
+function logs.setprogram(newname,newbanner)
+ name, banner = newname, newbanner
+end
+
+function logs.extendbanner(newbanner)
+ banner = banner .. " | ".. newbanner
+end
+
+function logs.reportlines(str) -- todo: <lines></lines>
+ for line in gmatch(str,"(.-)[\n\r]") do
+ logs.report(line)
+ end
+end
+
+function logs.reportline() -- for scripts too
+ logs.report()
+end
+
+function logs.simpleline()
+ logs.report()
+end
+
+function logs.simplelines(str) -- todo: <lines></lines>
+ for line in gmatch(str,"(.-)[\n\r]") do
+ logs.simple(line)
+ end
+end
+
+function logs.reportbanner() -- for scripts too
+ logs.report(banner)
+end
+
+function logs.help(message,option)
+ logs.reportbanner()
+ logs.reportline()
+ logs.reportlines(message)
+ if option ~= "nomoreinfo" then
+ logs.reportline()
+ logs.reportlines(moreinfo)
+ end
+end
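+
+-- Commented-out sketch of the typical script-side sequence; the program
+-- name, banner and help text are placeholders:
+--
+-- logs.setprogram("MTX Demo","a demo runner 0.1")
+-- logs.help("--run      process the given file\n--verbose  talk a lot\n","nomoreinfo") -- skip the generic moreinfo block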
+
+-- logging to a file
+
+
+function logs.system(whereto,process,jobname,category,...)
+ local message = format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%M:%S"),process,jobname,category,format(...))
+ for i=1,10 do
+ local f = io.open(whereto,"a")
+ if f then
+ f:write(message)
+ f:close()
+ break
+ else
+ sleep(0.1)
+ end
+ end
+end
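+
+-- Commented-out example call (all arguments are placeholders); the entry is
+-- appended to the given file, retrying up to ten times when it cannot be opened:
+--
+-- logs.system("context-jobs.log","luatex","myjob","info","run finished in %s seconds",12)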
+
+-- bonus
+
+function logs.fatal(where,...)
+ logs.report(where,"fatal error: %s, aborting now",format(...))
+ os.exit()
+end
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-pro'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type
+
+-- The protection implemented here is probably not that tight but good enough to catch
+-- problems due to naive usage.
+--
+-- There's a more extensive version (trac-xxx.lua) that supports nesting.
+--
+-- This will change when we have _ENV in lua 5.2+
+
+local trace_namespaces = false trackers.register("system.namespaces", function(v) trace_namespaces = v end)
+
+local report_system = logs.new("system")
+
+namespaces = { }
+
+local registered = { }
+
+local function report_index(k,name)
+ if trace_namespaces then
+ report_system("reference to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
+ else
+ report_system("reference to '%s' in protected namespace '%s'",k,name)
+ end
+end
+
+local function report_newindex(k,name)
+ if trace_namespaces then
+ report_system("assignment to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
+ else
+ report_system("assignment to '%s' in protected namespace '%s'",k,name)
+ end
+end
+
+local function register(name)
+ local data = name == "global" and _G or _G[name]
+ if not data then
+ return -- error
+ end
+ registered[name] = data
+ local m = getmetatable(data)
+ if not m then
+ m = { }
+ setmetatable(data,m)
+ end
+ local index, newindex = { }, { }
+ m.__saved__index = m.__index
+ m.__no__index = function(t,k)
+ if not index[k] then
+ index[k] = true
+ report_index(k,name)
+ end
+ return nil
+ end
+ m.__saved__newindex = m.__newindex
+ m.__no__newindex = function(t,k,v)
+ if not newindex[k] then
+ newindex[k] = true
+ report_newindex(k,name)
+ end
+ rawset(t,k,v)
+ end
+ m.__protection__depth = 0
+end
+
+local function private(name) -- maybe save name
+ local data = registered[name]
+ if not data then
+ data = _G[name]
+ if not data then
+ data = { }
+ _G[name] = data
+ end
+ register(name)
+ end
+ return data
+end
+
+local function protect(name)
+ local data = registered[name]
+ if not data then
+ return
+ end
+ local m = getmetatable(data)
+ local pd = m.__protection__depth
+ if pd > 0 then
+ m.__protection__depth = pd + 1
+ else
+ m.__saved__index, m.__saved__newindex = m.__index, m.__newindex
+ m.__index, m.__newindex = m.__no__index, m.__no__newindex
+ m.__protection__depth = 1
+ end
+end
+
+local function unprotect(name)
+ local data = registered[name]
+ if not data then
+ return
+ end
+ local m = getmetatable(data)
+ local pd = m.__protection__depth
+ if pd > 1 then
+ m.__protection__depth = pd - 1
+ else
+ m.__index, m.__newindex = m.__saved__index, m.__saved__newindex
+ m.__protection__depth = 0
+ end
+end
+
+local function protectall()
+ for name, _ in next, registered do
+ if name ~= "global" then
+ protect(name)
+ end
+ end
+end
+
+local function unprotectall()
+ for name, _ in next, registered do
+ if name ~= "global" then
+ unprotect(name)
+ end
+ end
+end
+
+namespaces.register = register -- register when defined
+namespaces.private = private -- allocate and register if needed
+namespaces.protect = protect
+namespaces.unprotect = unprotect
+namespaces.protectall = protectall
+namespaces.unprotectall = unprotectall
+
+namespaces.private("namespaces") registered = { } register("global") -- unreachable
+
+directives.register("system.protect", function(v)
+ if v then
+ protectall()
+ else
+ unprotectall()
+ end
+end)
+
+directives.register("system.checkglobals", function(v)
+ if v then
+ report_system("enabling global namespace guard")
+ protect("global")
+ else
+ report_system("disabling global namespace guard")
+ unprotect("global")
+ end
+end)
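+
+-- Commented-out sketch of the namespace guard; the table name "mylib" is
+-- made up:
+--
+-- mylib = namespaces.private("mylib") -- allocate and register _G.mylib
+-- namespaces.protect("mylib")
+-- local x = mylib.unknownfield        -- reported as a reference in a protected namespace
+-- mylib.newfield = 1                  -- reported as an assignment (but still stored)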
+
+-- dummy section (will go to luat-dum.lua)
+
+
+
+
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['luat-env'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- A former version provided functionality for non-embedded core
+-- scripts, i.e. runtime library loading. Given the amount of
+-- Lua code we use now, this no longer makes sense. Much of this
+-- evolved before bytecode arrays were available and so a lot of
+-- code has disappeared already.
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
+local unquote, quote = string.unquote, string.quote
+
+-- precautions
+
+os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+
+function os.setlocale()
+ -- no way you can mess with it
+end
+
+-- dirty tricks
+
+if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
+ arg[-1] = arg[0]
+ arg[ 0] = arg[2]
+ for k=3,#arg do
+ arg[k-2] = arg[k]
+ end
+ arg[#arg] = nil -- last
+ arg[#arg] = nil -- pre-last
+end
+
+-- environment
+
+environment = environment or { }
+environment.arguments = { }
+environment.files = { }
+environment.sortedflags = nil
+
+local mt = {
+ __index = function(_,k)
+ if k == "version" then
+ local version = tex.toks and tex.toks.contextversiontoks
+ if version and version ~= "" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k == "jobname" or k == "formatname" then
+ local name = tex and tex[k]
+ if name or name == "" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k == "outputfilename" then
+ local name = environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+
+setmetatable(environment,mt)
+
+function environment.initialize_arguments(arg)
+ local arguments, files = { }, { }
+ environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
+ for index=1,#arg do
+ local argument = arg[index]
+ if index > 0 then
+ local flag, value = match(argument,"^%-+(.-)=(.-)$")
+ if flag then
+ arguments[flag] = unquote(value or "")
+ else
+ flag = match(argument,"^%-+(.+)")
+ if flag then
+ arguments[flag] = true
+ else
+ files[#files+1] = argument
+ end
+ end
+ end
+ end
+ environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
+end
+
+function environment.setargument(name,value)
+ environment.arguments[name] = value
+end
+
+-- todo: defaults, better checks e.g. on type (boolean versus string)
+--
+-- tricky: too many hits when we support partials unless we add
+-- a registration of arguments, so from now on we have 'partial'
+
+function environment.argument(name,partial)
+ local arguments, sortedflags = environment.arguments, environment.sortedflags
+ if arguments[name] then
+ return arguments[name]
+ elseif partial then
+ if not sortedflags then
+ sortedflags = table.sortedkeys(arguments)
+ for k=1,#sortedflags do
+ sortedflags[k] = "^" .. sortedflags[k]
+ end
+ environment.sortedflags = sortedflags
+ end
+ -- example of potential clash: ^mode ^modefile
+ for k=1,#sortedflags do
+ local v = sortedflags[k]
+ if find(name,v) then
+ return arguments[sub(v,2,#v)]
+ end
+ end
+ end
+ return nil
+end
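+
+-- Commented-out sketch: given the command line "--mod=draft myfile.tex",
+-- after initialize_arguments has run (flag and file names are examples):
+--
+-- environment.argument("mod")        -- "draft"
+-- environment.argument("mode",true)  -- also "draft": partial match against the registered "mod"
+-- environment.files[1]               -- "myfile.tex"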
+
+function environment.split_arguments(separator) -- rather special, cut-off before separator
+ local done, before, after = false, { }, { }
+ local original_arguments = environment.original_arguments
+ for k=1,#original_arguments do
+ local v = original_arguments[k]
+ if not done and v == separator then
+ done = true
+ elseif done then
+ after[#after+1] = v
+ else
+ before[#before+1] = v
+ end
+ end
+ return before, after
+end
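+
+-- Commented-out sketch (separator and arguments are examples): with the
+-- original arguments { "--run", "demo.tex", "--", "--pass=1" },
+--
+-- local before, after = environment.split_arguments("--")
+-- -- before = { "--run", "demo.tex" }, after = { "--pass=1" }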
+
+function environment.reconstruct_commandline(arg,noquote)
+ arg = arg or environment.original_arguments
+ if noquote and #arg == 1 then
+ local a = arg[1]
+ a = resolvers.resolve(a)
+ a = unquote(a)
+ return a
+ elseif #arg > 0 then
+ local result = { }
+ for i=1,#arg do
+ local a = arg[i]
+ a = resolvers.resolve(a)
+ a = unquote(a)
+ a = gsub(a,'"','\\"') -- tricky
+ if find(a," ") then
+ result[#result+1] = quote(a)
+ else
+ result[#result+1] = a
+ end
+ end
+ return table.join(result," ")
+ else
+ return ""
+ end
+end
+
+if arg then
+
+ -- new: reconstruct quoted snippets (maybe better to just remove the quotes here and add them back later)
+ local newarg, instring = { }, false
+
+ for index=1,#arg do
+ local argument = arg[index]
+ if find(argument,"^\"") then
+ newarg[#newarg+1] = gsub(argument,"^\"","")
+ if not find(argument,"\"$") then
+ instring = true
+ end
+ elseif find(argument,"\"$") then
+ newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
+ instring = false
+ elseif instring then
+ newarg[#newarg] = newarg[#newarg] .. " " .. argument
+ else
+ newarg[#newarg+1] = argument
+ end
+ end
+ for i=1,-5,-1 do
+ newarg[i] = arg[i]
+ end
+
+ environment.initialize_arguments(newarg)
+ environment.original_arguments = newarg
+ environment.raw_arguments = arg
+
+ arg = { } -- prevent duplicate handling
+
+end
+
+-- weird place ... depends on a not yet loaded module
+
+function environment.texfile(filename)
+ return resolvers.find_file(filename,'tex')
+end
+
+function environment.luafile(filename)
+ local resolved = resolvers.find_file(filename,'tex') or ""
+ if resolved ~= "" then
+ return resolved
+ end
+ resolved = resolvers.find_file(filename,'texmfscripts') or ""
+ if resolved ~= "" then
+ return resolved
+ end
+ return resolvers.find_file(filename,'luatexlibs') or ""
+end
+
+environment.loadedluacode = loadfile -- can be overloaded
+
+function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
+ filename = file.replacesuffix(filename, "lua")
+ local fullname = environment.luafile(filename)
+ if fullname and fullname ~= "" then
+ local data = environment.loadedluacode(fullname)
+ if trace_locating then
+ report_resolvers("loading file %s%s", fullname, not data and " failed" or "")
+ elseif not silent then
+ texio.write("<",data and "+ " or "- ",fullname,">")
+ end
+ return data
+ else
+ if trace_locating then
+ report_resolvers("unknown file %s", filename)
+ end
+ return nil
+ end
+end
+
+-- the next ones can use the previous ones / combine
+
+function environment.loadluafile(filename, version)
+ local lucname, luaname, chunk
+ local basename = file.removesuffix(filename)
+ if basename == filename then
+ lucname, luaname = basename .. ".luc", basename .. ".lua"
+ else
+ lucname, luaname = nil, basename -- forced suffix
+ end
+ -- when not overloaded by explicit suffix we look for a luc file first
+ local fullname = (lucname and environment.luafile(lucname)) or ""
+ if fullname ~= "" then
+ if trace_locating then
+ report_resolvers("loading %s", fullname)
+ end
+ chunk = loadfile(fullname) -- this way we don't need a file exists check
+ end
+ if chunk then
+ assert(chunk)()
+ if version then
+ -- we check whether the version number of this chunk matches
+ local v = version -- can be nil
+ if modules and modules[filename] then
+ v = modules[filename].version -- new method
+ elseif versions and versions[filename] then
+ v = versions[filename] -- old method
+ end
+ if v == version then
+ return true
+ else
+ if trace_locating then
+ report_resolvers("version mismatch for %s: lua=%s, luc=%s", filename, v, version)
+ end
+ environment.loadluafile(filename)
+ end
+ else
+ return true
+ end
+ end
+ fullname = (luaname and environment.luafile(luaname)) or ""
+ if fullname ~= "" then
+ if trace_locating then
+ report_resolvers("loading %s", fullname)
+ end
+ chunk = loadfile(fullname) -- this way we don't need a file exists check
+ if not chunk then
+ if trace_locating then
+ report_resolvers("unknown file %s", filename)
+ end
+ else
+ assert(chunk)()
+ return true
+ end
+ end
+ return false
+end
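+
+-- Commented-out sketch of the loaders above; the file name and version are
+-- placeholders:
+--
+-- local chunk = environment.luafilechunk("some-module")      -- finds and loads some-module.lua/.luc
+-- local ok    = environment.loadluafile("some-module",1.001) -- prefers .luc, falls back to .lua on a version mismatch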
+
end -- of closure
@@ -3906,6 +5048,8 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+local report_xml = logs.new("xml")
+
--[[ldx--
<p>The parser used here is inspired by the variant discussed in the lua book, but
handles comment and processing instructions, has a different structure, provides
@@ -3920,7 +5064,6 @@ optimize the code.</p>
xml = xml or { }
---~ local xml = xml
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
@@ -4044,7 +5187,7 @@ local dcache, hcache, acache = { }, { }, { }
local mt = { }
-function initialize_mt(root)
+local function initialize_mt(root)
mt = { __index = root } -- will be redefined later
end
@@ -4148,7 +5291,7 @@ local reported_attribute_errors = { }
local function attribute_value_error(str)
if not reported_attribute_errors[str] then
- logs.report("xml","invalid attribute value: %q",str)
+ report_xml("invalid attribute value: %q",str)
reported_attribute_errors[str] = true
at._error_ = str
end
@@ -4156,7 +5299,7 @@ local function attribute_value_error(str)
end
local function attribute_specification_error(str)
if not reported_attribute_errors[str] then
- logs.report("xml","invalid attribute specification: %q",str)
+ report_xml("invalid attribute specification: %q",str)
reported_attribute_errors[str] = true
at._error_ = str
end
@@ -4219,18 +5362,18 @@ local function handle_hex_entity(str)
h = unify_predefined and predefined_unified[n]
if h then
if trace_entities then
- logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
end
elseif utfize then
h = (n and utfchar(n)) or xml.unknown_hex_entity_format(str) or ""
if not n then
- logs.report("xml","utfize, ignoring hex entity &#x%s;",str)
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
end
else
if trace_entities then
- logs.report("xml","found entity &#x%s;",str)
+ report_xml("found entity &#x%s;",str)
end
h = "&#x" .. str .. ";"
end
@@ -4246,18 +5389,18 @@ local function handle_dec_entity(str)
d = unify_predefined and predefined_unified[n]
if d then
if trace_entities then
- logs.report("xml","utfize, converting dec entity &#%s; into %s",str,d)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
elseif utfize then
d = (n and utfchar(n)) or xml.unknown_dec_entity_format(str) or ""
if not n then
- logs.report("xml","utfize, ignoring dec entity &#%s;",str)
+ report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,h)
end
else
if trace_entities then
- logs.report("xml","found entity &#%s;",str)
+ report_xml("found entity &#%s;",str)
end
d = "&#" .. str .. ";"
end
@@ -4282,7 +5425,7 @@ local function handle_any_entity(str)
end
if a then
if trace_entities then
- logs.report("xml","resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
a = lpegmatch(parsedentity,a) or a
else
@@ -4291,11 +5434,11 @@ local function handle_any_entity(str)
end
if a then
if trace_entities then
- logs.report("xml","resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
end
else
if trace_entities then
- logs.report("xml","keeping entity &%s;",str)
+ report_xml("keeping entity &%s;",str)
end
if str == "" then
a = "&error;"
@@ -4307,7 +5450,7 @@ local function handle_any_entity(str)
acache[str] = a
elseif trace_entities then
if not acache[str] then
- logs.report("xml","converting entity &%s; into %s",str,a)
+ report_xml("converting entity &%s; into %s",str,a)
acache[str] = a
end
end
@@ -4316,7 +5459,7 @@ local function handle_any_entity(str)
local a = acache[str]
if not a then
if trace_entities then
- logs.report("xml","found entity &%s;",str)
+ report_xml("found entity &%s;",str)
end
a = resolve_predefined and predefined_simplified[str]
if a then
@@ -4335,7 +5478,7 @@ local function handle_any_entity(str)
end
local function handle_end_entity(chr)
- logs.report("xml","error in entity, %q found instead of ';'",chr)
+ report_xml("error in entity, %q found instead of ';'",chr)
end
local space = S(' \r\n\t')
@@ -4470,7 +5613,7 @@ local function xmlconvert(data, settings)
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, result, entities = { }, { }, { }, { }, nil, nil, settings.entities or { }
+ stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -4498,6 +5641,7 @@ local function xmlconvert(data, settings)
else
errorstr = "invalid xml file - no text at all"
end
+ local result
if errorstr and errorstr ~= "" then
result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
setmetatable(stack, mt)
@@ -4678,7 +5822,7 @@ local function verbose_element(e,handlers)
ats[#ats+1] = format('%s=%q',k,v)
end
end
- if ern and trace_remap and ern ~= ens then
+ if ern and trace_entities and ern ~= ens then
ens = ern
end
if ens ~= "" then
@@ -4809,7 +5953,7 @@ local function newhandlers(settings)
if settings then
for k,v in next, settings do
if type(v) == "table" then
- tk = t[k] if not tk then tk = { } t[k] = tk end
+ local tk = t[k] if not tk then tk = { } t[k] = tk end
for kk,vv in next, v do
tk[kk] = vv
end
@@ -4920,7 +6064,7 @@ local function xmltext(root) -- inline
return (root and xmltostring(root)) or ""
end
-function initialize_mt(root)
+initialize_mt = function(root) -- redefinition
mt = { __tostring = xmltext, __index = root }
end
@@ -4955,7 +6099,6 @@ xml.string = xmlstring
<p>A few helpers:</p>
--ldx]]--
---~ xmlsetproperty(root,"settings",settings)
function xml.settings(e)
while e do
@@ -5117,6 +6260,8 @@ local trace_lpath = false if trackers then trackers.register("xml.path",
local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end
+local report_lpath = logs.new("lpath")
+
--[[ldx--
<p>We've now arrived at an interesting part: accessing the tree using a subset
of <l n='xpath'/> and since we're not compatible we call it <l n='lpath'/>. We
@@ -5143,7 +6288,7 @@ local function fallback (t, name)
if fn then
t[name] = fn
else
- logs.report("xml","unknown sub finalizer '%s'",tostring(name))
+ report_lpath("unknown sub finalizer '%s'",tostring(name))
fn = function() end
end
return fn
@@ -5204,11 +6349,6 @@ apply_axis['root'] = function(list)
end
apply_axis['self'] = function(list)
---~ local collected = { }
---~ for l=1,#list do
---~ collected[#collected+1] = list[l]
---~ end
---~ return collected
return list
end
@@ -5335,38 +6475,10 @@ apply_axis['namespace'] = function(list)
end
apply_axis['following'] = function(list) -- incomplete
---~ local collected = { }
---~ for l=1,#list do
---~ local ll = list[l]
---~ local p = ll.__p__
---~ local d = p.dt
---~ for i=ll.ni+1,#d do
---~ local di = d[i]
---~ if type(di) == "table" then
---~ collected[#collected+1] = di
---~ break
---~ end
---~ end
---~ end
---~ return collected
return { }
end
apply_axis['preceding'] = function(list) -- incomplete
---~ local collected = { }
---~ for l=1,#list do
---~ local ll = list[l]
---~ local p = ll.__p__
---~ local d = p.dt
---~ for i=ll.ni-1,1,-1 do
---~ local di = d[i]
---~ if type(di) == "table" then
---~ collected[#collected+1] = di
---~ break
---~ end
---~ end
---~ end
---~ return collected
return { }
end
@@ -5629,14 +6741,12 @@ local converter = Cs (
)
cleaner = Cs ( (
---~ lp_fastpos +
lp_reserved +
lp_number +
lp_string +
1 )^1 )
---~ expr
local template_e = [[
local expr = xml.expressions
@@ -5687,13 +6797,13 @@ local skip = { }
local function errorrunner_e(str,cnv)
if not skip[str] then
- logs.report("lpath","error in expression: %s => %s",str,cnv)
+ report_lpath("error in expression: %s => %s",str,cnv)
skip[str] = cnv or str
end
return false
end
local function errorrunner_f(str,arg)
- logs.report("lpath","error in finalizer: %s(%s)",str,arg or "")
+ report_lpath("error in finalizer: %s(%s)",str,arg or "")
return false
end
@@ -5860,7 +6970,7 @@ local function lshow(parsed)
end
local s = table.serialize_functions -- ugly
table.serialize_functions = false -- ugly
- logs.report("lpath","%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false))
+ report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false))
table.serialize_functions = s -- ugly
end
@@ -5890,7 +7000,7 @@ parse_pattern = function (pattern) -- the gain of caching is rather minimal
local np = #parsed
if np == 0 then
parsed = { pattern = pattern, register_self, state = "parsing error" }
- logs.report("lpath","parsing error in '%s'",pattern)
+ report_lpath("parsing error in '%s'",pattern)
lshow(parsed)
else
-- we could have done this with a more complex parser but this
@@ -5994,32 +7104,32 @@ local function traced_apply(list,parsed,nofparsed,order)
if trace_lparse then
lshow(parsed)
end
- logs.report("lpath", "collecting : %s",parsed.pattern)
- logs.report("lpath", " root tags : %s",tagstostring(list))
- logs.report("lpath", " order : %s",order or "unset")
+ report_lpath("collecting : %s",parsed.pattern)
+ report_lpath(" root tags : %s",tagstostring(list))
+ report_lpath(" order : %s",order or "unset")
local collected = list
for i=1,nofparsed do
local pi = parsed[i]
local kind = pi.kind
if kind == "axis" then
collected = apply_axis[pi.axis](collected)
- logs.report("lpath", "% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
+ report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
elseif kind == "nodes" then
collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- logs.report("lpath", "% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
+ report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
- logs.report("lpath", "% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
+ report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
elseif kind == "finalizer" then
collected = pi.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
return collected
end
if not collected or #collected == 0 then
local pn = i < nofparsed and parsed[nofparsed]
if pn and pn.kind == "finalizer" then
collected = pn.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
return collected
end
return nil
@@ -6132,7 +7242,7 @@ expressions.boolean = toboolean
-- user interface
local function traverse(root,pattern,handle)
- logs.report("xml","use 'xml.selection' instead for '%s'",pattern)
+ report_lpath("use 'xml.selection' instead for '%s'",pattern)
local collected = parse_apply({ root },pattern)
if collected then
for c=1,#collected do
@@ -6180,7 +7290,7 @@ local function dofunction(collected,fnc)
f(collected[c])
end
else
- logs.report("xml","unknown function '%s'",fnc)
+ report_lpath("unknown function '%s'",fnc)
end
end
end
@@ -6372,7 +7482,6 @@ local function xmlgsub(t,old,new) -- will be replaced
end
end
---~ xml.gsub = xmlgsub
function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual
if d and k then
@@ -6384,12 +7493,7 @@ function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual
end
end
---~ xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
---~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end
---~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end
---~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end
---~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>"
local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs
@@ -6455,6 +7559,8 @@ if not modules then modules = { } end modules ['lxml-aux'] = {
local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
+local report_xml = logs.new("xml")
+
local xmlparseapply, xmlconvert, xmlcopy, xmlname = xml.parse_apply, xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
@@ -6463,7 +7569,7 @@ local insert, remove = table.insert, table.remove
local gmatch, gsub = string.gmatch, string.gsub
local function report(what,pattern,c,e)
- logs.report("xml","%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
+ report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
local function withelements(e,handle,depth)
@@ -6616,12 +7722,7 @@ local function xmltoelement(whatever,root)
return whatever -- string
end
if element then
- --~ if element.ri then
- --~ element = element.dt[element.ri].dt
- --~ else
- --~ element = element.dt
- --~ end
- end
+ end
return element
end
@@ -6760,9 +7861,6 @@ local function include(xmldata,pattern,attribute,recursive,loaddata)
-- for the moment hard coded
epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
else
---~ local settings = xmldata.settings
---~ settings.parent_root = xmldata -- to be tested
---~ local xi = xmlconvert(data,settings)
local xi = xmlinheritedconvert(data,xmldata)
if not xi then
epdt[ek.ni] = "" -- xml.empty(d,k)
@@ -6779,28 +7877,7 @@ end
xml.include = include
---~ local function manipulate(xmldata,pattern,manipulator) -- untested and might go away
---~ local collected = xmlparseapply({ xmldata },pattern)
---~ if collected then
---~ local xmltostring = xml.tostring
---~ for c=1,#collected do
---~ local e = collected[c]
---~ local data = manipulator(xmltostring(e))
---~ if data == "" then
---~ epdt[e.ni] = ""
---~ else
---~ local xi = xmlinheritedconvert(data,xmldata)
---~ if not xi then
---~ epdt[e.ni] = ""
---~ else
---~ epdt[e.ni] = xml.body(xi) -- xml.assign(d,k,xi)
---~ end
---~ end
---~ end
---~ end
---~ end
-
---~ xml.manipulate = manipulate
+
function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and trailing space !
local collected = xmlparseapply({ root },pattern)
@@ -6826,8 +7903,7 @@ function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and
end
end
else
- --~ str.ni = i
- t[#t+1] = str
+ t[#t+1] = str
end
end
e.dt = t
@@ -7285,825 +8361,1137 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-env'] = {
+if not modules then modules = { } end modules ['data-ini'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
--- A former version provided functionality for non embeded core
--- scripts i.e. runtime library loading. Given the amount of
--- Lua code we use now, this no longer makes sense. Much of this
--- evolved before bytecode arrays were available and so a lot of
--- code has disappeared already.
+local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local concat = table.concat
+local next, type = next, type
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
-local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquote, quote = string.unquote, string.quote
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
--- precautions
+local report_resolvers = logs.new("resolvers")
-os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+local ostype, osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv
-function os.setlocale()
- -- no way you can mess with it
-end
-
--- dirty tricks
-
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
-end
+-- The code here used to be part of data-res, but for convenience
+-- we now split it over multiple files. As this file is now the
+-- starting point, we introduce resolvers here.
-if profiler and os.env["MTX_PROFILE_RUN"] == "YES" then
- profiler.start("luatex-profile.log")
-end
+resolvers = resolvers or { }
--- environment
+-- We don't want the kpse library to kick in. Also, we want to be able to
+-- execute programs. Control over execution is implemented later.
-environment = environment or { }
-environment.arguments = { }
-environment.files = { }
-environment.sortedflags = nil
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
-if not environment.jobname or environment.jobname == "" then if tex then environment.jobname = tex.jobname end end
-if not environment.version or environment.version == "" then environment.version = "unknown" end
-if not environment.jobname then environment.jobname = "unknown" end
+kpse = { original = kpse }
-function environment.initialize_arguments(arg)
- local arguments, files = { }, { }
- environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
- for index=1,#arg do
- local argument = arg[index]
- if index > 0 then
- local flag, value = match(argument,"^%-+(.-)=(.-)$")
- if flag then
- arguments[flag] = unquote(value or "")
- else
- flag = match(argument,"^%-+(.+)")
- if flag then
- arguments[flag] = true
- else
- files[#files+1] = argument
- end
+setmetatable(kpse, {
+ __index = function(kp,name)
+ local r = resolvers[name]
+ if not r then
+ r = function (...)
+ report_resolvers("not supported: %s(%s)",name,concat(...))
end
+ rawset(kp,name,r)
end
+ return r
end
- environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
+} )
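+
+-- Commented-out illustration: the stub redirects kpse calls to same-named
+-- functions in the resolvers table when they exist; unknown names get a
+-- reporting stub instead. For instance:
+--
+-- kpse.find_file("context.mkiv") -- dispatches to resolvers.find_file when that is defined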
+
+-- First we check a couple of environment variables. Some might be
+-- set already, but we need them later on. We start with the system
+-- font path.
+
+do
+
+ local osfontdir = osgetenv("OSFONTDIR")
+
+ if osfontdir and osfontdir ~= "" then
+ -- ok
+ elseif osname == "windows" then
+ ossetenv("OSFONTDIR","c:/windows/fonts//")
+ elseif osname == "macosx" then
+ ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
+ end
+
end
-function environment.setargument(name,value)
- environment.arguments[name] = value
+-- Next comes the user's home path. We need this as later on we have
+-- to replace ~ with its value.
+
+do
+
+ local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or '~'
+
+ homedir = file.collapse_path(homedir)
+
+ ossetenv("HOME", homedir) -- can be used in unix cnf files
+ ossetenv("USERPROFILE",homedir) -- can be used in windows cnf files
+
+ environment.homedir = homedir
+
end
--- todo: defaults, better checks e.g on type (boolean versus string)
---
--- tricky: too many hits when we support partials unless we add
--- a registration of arguments so from now on we have 'partial'
+-- The following code sets the name of the binary itself and its
+-- path. This is fallback code, as we have os.selfdir now.
-function environment.argument(name,partial)
- local arguments, sortedflags = environment.arguments, environment.sortedflags
- if arguments[name] then
- return arguments[name]
- elseif partial then
- if not sortedflags then
- sortedflags = table.sortedkeys(arguments)
- for k=1,#sortedflags do
- sortedflags[k] = "^" .. sortedflags[k]
- end
- environment.sortedflags = sortedflags
+do
+
+ local args = environment.original_arguments or arg -- this needs a cleanup
+
+ local ownbin = environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
+ local ownpath = environment.ownpath or os.selfdir
+
+ ownbin = file.collapse_path(ownbin)
+ ownpath = file.collapse_path(ownpath)
+
+ if not ownpath or ownpath == "" or ownpath == "unset" then
+ ownpath = args[-1] or arg[-1]
+ ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
+ if not ownpath or ownpath == "" then
+ ownpath = args[-0] or arg[-0]
+ ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
end
- -- example of potential clash: ^mode ^modefile
- for k=1,#sortedflags do
- local v = sortedflags[k]
- if find(name,v) then
- return arguments[sub(v,2,#v)]
+ local binary = ownbin
+ if not ownpath or ownpath == "" then
+ ownpath = ownpath and filedirname(binary)
+ end
+ if not ownpath or ownpath == "" then
+ if os.binsuffix ~= "" then
+ binary = file.replacesuffix(binary,os.binsuffix)
+ end
+ local path = osgetenv("PATH")
+ if path then
+ for p in gmatch(path,"[^"..io.pathseparator.."]+") do
+ local b = filejoin(p,binary)
+ if lfs.isfile(b) then
+ -- we assume that after changing to the path the currentdir function
+ -- resolves to the real location, and we use this side effect here; this
+ -- trick is needed because on the mac, installations use symlinks in the
+ -- path instead of real locations
+ local olddir = lfs.currentdir()
+ if lfs.chdir(p) then
+ local pp = lfs.currentdir()
+ if trace_locating and p ~= pp then
+ report_resolvers("following symlink '%s' to '%s'",p,pp)
+ end
+ ownpath = pp
+ lfs.chdir(olddir)
+ else
+ if trace_locating then
+ report_resolvers("unable to check path '%s'",p)
+ end
+ ownpath = p
+ end
+ break
+ end
+ end
end
end
+ if not ownpath or ownpath == "" then
+ ownpath = "."
+ report_resolvers("forcing fallback ownpath .")
+ elseif trace_locating then
+ report_resolvers("using ownpath '%s'",ownpath)
+ end
end
- return nil
+
+ environment.ownbin = ownbin
+ environment.ownpath = ownpath
+
end
-environment.argument("x",true)
+resolvers.ownpath = environment.ownpath
-function environment.split_arguments(separator) -- rather special, cut-off before separator
- local done, before, after = false, { }, { }
- local original_arguments = environment.original_arguments
- for k=1,#original_arguments do
- local v = original_arguments[k]
- if not done and v == separator then
- done = true
- elseif done then
- after[#after+1] = v
- else
- before[#before+1] = v
- end
- end
- return before, after
+function resolvers.getownpath()
+ return environment.ownpath
end
-function environment.reconstruct_commandline(arg,noquote)
- arg = arg or environment.original_arguments
- if noquote and #arg == 1 then
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquote(a)
- return a
- elseif #arg > 0 then
- local result = { }
- for i=1,#arg do
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquote(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quote(a)
- else
- result[#result+1] = a
- end
- end
- return table.join(result," ")
+-- The self variables permit us to use only a few (or even no)
+-- environment variables.
+
+do
+
+ local ownpath = environment.ownpath or dir.current()
+
+ if ownpath then
+ ossetenv('SELFAUTOLOC', file.collapse_path(ownpath))
+ ossetenv('SELFAUTODIR', file.collapse_path(ownpath .. "/.."))
+ ossetenv('SELFAUTOPARENT', file.collapse_path(ownpath .. "/../.."))
else
- return ""
+ report_resolvers("error: unable to locate ownpath")
+ os.exit()
end
+
end
-if arg then
+-- The running os:
- -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
- local newarg, instring = { }, false
+-- todo: check if context sits here; os.platform is more trustworthy
+-- than the bin check, as mtx-update runs from another path
- for index=1,#arg do
- local argument = arg[index]
- if find(argument,"^\"") then
- newarg[#newarg+1] = gsub(argument,"^\"","")
- if not find(argument,"\"$") then
- instring = true
- end
- elseif find(argument,"\"$") then
- newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
- instring = false
- elseif instring then
- newarg[#newarg] = newarg[#newarg] .. " " .. argument
- else
- newarg[#newarg+1] = argument
- end
- end
- for i=1,-5,-1 do
- newarg[i] = arg[i]
- end
+local texos = environment.texos or osgetenv("TEXOS")
+local texmfos = environment.texmfos or osgetenv('SELFAUTODIR')
- environment.initialize_arguments(newarg)
- environment.original_arguments = newarg
- environment.raw_arguments = arg
+if not texos or texos == "" then
+ texos = file.basename(texmfos)
+end
- arg = { } -- prevent duplicate handling
+ossetenv('TEXMFOS', texmfos) -- full bin path
+ossetenv('TEXOS', texos) -- partial bin parent
+ossetenv('SELFAUTOSYSTEM',os.platform) -- bonus
-end
+environment.texos = texos
+environment.texmfos = texmfos
--- weird place ... depends on a not yet loaded module
+-- The current root:
-function environment.texfile(filename)
- return resolvers.find_file(filename,'tex')
-end
+local texroot = environment.texroot or osgetenv("TEXROOT")
-function environment.luafile(filename)
- local resolved = resolvers.find_file(filename,'tex') or ""
- if resolved ~= "" then
- return resolved
- end
- resolved = resolvers.find_file(filename,'texmfscripts') or ""
- if resolved ~= "" then
- return resolved
- end
- return resolvers.find_file(filename,'luatexlibs') or ""
+if not texroot or texroot == "" then
+ texroot = osgetenv('SELFAUTOPARENT')
+ ossetenv('TEXROOT',texroot)
end
-environment.loadedluacode = loadfile -- can be overloaded
+environment.texroot = file.collapse_path(texroot)
---~ function environment.loadedluacode(name)
---~ if os.spawn("texluac -s -o texluac.luc " .. name) == 0 then
---~ local chunk = loadstring(io.loaddata("texluac.luc"))
---~ os.remove("texluac.luc")
---~ return chunk
---~ else
---~ environment.loadedluacode = loadfile -- can be overloaded
---~ return loadfile(name)
---~ end
---~ end
-
-function environment.luafilechunk(filename) -- used for loading lua bytecode in the format
- filename = file.replacesuffix(filename, "lua")
- local fullname = environment.luafile(filename)
- if fullname and fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading file %s", fullname)
- end
- return environment.loadedluacode(fullname)
- else
- if trace_locating then
- logs.report("fileio","unknown file %s", filename)
- end
- return nil
+-- Tracing. Todo ...
+
+function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
+ if n then
+ trackers.disable("resolvers.*")
+ trackers.enable("resolvers."..n)
end
end
--- the next ones can use the previous ones / combine
+resolvers.settrace(osgetenv("MTX_INPUT_TRACE"))
-function environment.loadluafile(filename, version)
- local lucname, luaname, chunk
- local basename = file.removesuffix(filename)
- if basename == filename then
- lucname, luaname = basename .. ".luc", basename .. ".lua"
- else
- lucname, luaname = nil, basename -- forced suffix
- end
- -- when not overloaded by explicit suffix we look for a luc file first
- local fullname = (lucname and environment.luafile(lucname)) or ""
- if fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- end
- if chunk then
- assert(chunk)()
- if version then
- -- we check of the version number of this chunk matches
- local v = version -- can be nil
- if modules and modules[filename] then
- v = modules[filename].version -- new method
- elseif versions and versions[filename] then
- v = versions[filename] -- old method
- end
- if v == version then
- return true
- else
- if trace_locating then
- logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version)
- end
- environment.loadluafile(filename)
- end
- else
- return true
- end
- end
- fullname = (luaname and environment.luafile(luaname)) or ""
- if fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- if not chunk then
- if trace_locating then
- logs.report("fileio","unknown file %s", filename)
- end
- else
- assert(chunk)()
- return true
- end
- end
- return false
-end
+-- todo:
+
+-- if profiler and osgetenv("MTX_PROFILE_RUN") == "YES" then
+-- profiler.start("luatex-profile.log")
+-- end
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-inf'] = {
+if not modules then modules = { } end modules ['data-exp'] = {
version = 1.001,
- comment = "companion to trac-inf.mkiv",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
-local format = string.format
+local format, gsub, find, gmatch, lower = string.format, string.gsub, string.find, string.gmatch, string.lower
+local concat, sort = table.concat, table.sort
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local lpegCt, lpegCs, lpegP, lpegC, lpegS = lpeg.Ct, lpeg.Cs, lpeg.P, lpeg.C, lpeg.S
+local type, next = type, next
-local statusinfo, n, registered = { }, 0, { }
+local ostype = os.type
+local collapse_path = file.collapse_path
-statistics = statistics or { }
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
-statistics.enable = true
-statistics.threshold = 0.05
+local report_resolvers = logs.new("resolvers")
--- timing functions
+-- As this bit of code is somewhat special it gets its own module. After
+-- all, when working on the main resolver code, I don't want to scroll
+-- past this every time.
-local clock = os.gettimeofday or os.clock
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
+-- this one is better and faster, but it took me a while to realize
+-- that this kind of replacement is cleaner than messy parsing and
+-- fuzzy concatenating; we can probably gain a bit by selectively
+-- applying lpeg, but experiments with parsing this with lpeg proved
+-- not to work that well; the parsing is ok, but dealing with the
+-- resulting table is a pain because we need to work inside-out recursively
-local notimer
+local dummy_path_expr = "^!*unset/*$"
-function statistics.hastimer(instance)
- return instance and instance.starttime
+local function do_first(a,b)
+ local t = { }
+ for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
+ return "{" .. concat(t,",") .. "}"
end
-function statistics.resettiming(instance)
- if not instance then
- notimer = { timing = 0, loadtime = 0 }
- else
- instance.timing, instance.loadtime = 0, 0
+local function do_second(a,b)
+ local t = { }
+ for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
+ return "{" .. concat(t,",") .. "}"
+end
+
+local function do_both(a,b)
+ local t = { }
+ for sa in gmatch(a,"[^,]+") do
+ for sb in gmatch(b,"[^,]+") do
+ t[#t+1] = sa .. sb
+ end
end
+ return "{" .. concat(t,",") .. "}"
+end
+
+local function do_three(a,b,c)
+ return a .. b.. c
end
-function statistics.starttiming(instance)
- if not instance then
- notimer = { }
- instance = notimer
+local stripper_1 = lpeg.stripper("{}@")
+
+local replacer_1 = lpeg.replacer {
+ { ",}", ",@}" },
+ { "{,", "{@," },
+}
+
+local function splitpathexpr(str, newlist, validate)
+ -- no need for further optimization as it is only called a
+ -- few times; we could use lpeg for the substitutions
+ if trace_expansions then
+ report_resolvers("expanding variable '%s'",str)
end
- local it = instance.timing
- if not it then
- it = 0
+ local t, ok, done = newlist or { }, false, false
+ str = lpegmatch(replacer_1,str)
+ while true do
+ done = false
+ while true do
+ str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
+ if ok > 0 then done = true else break end
+ end
+ while true do
+ str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
+ if ok > 0 then done = true else break end
+ end
+ while true do
+ str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
+ if ok > 0 then done = true else break end
+ end
+ str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
+ if ok > 0 then done = true end
+ if not done then break end
end
- if it == 0 then
- instance.starttime = clock()
- if not instance.loadtime then
- instance.loadtime = 0
+ str = lpegmatch(stripper_1,str)
+ if validate then
+ for s in gmatch(str,"[^,]+") do
+ s = validate(s)
+ if s then t[#t+1] = s end
end
else
---~ logs.report("system","nested timing (%s)",tostring(instance))
- end
- instance.timing = it + 1
-end
-
-function statistics.stoptiming(instance, report)
- if not instance then
- instance = notimer
+ for s in gmatch(str,"[^,]+") do
+ t[#t+1] = s
+ end
end
- if instance then
- local it = instance.timing
- if it > 1 then
- instance.timing = it - 1
- else
- local starttime = instance.starttime
- if starttime then
- local stoptime = clock()
- local loadtime = stoptime - starttime
- instance.stoptime = stoptime
- instance.loadtime = instance.loadtime + loadtime
- if report then
- statistics.report("load time %0.3f",loadtime)
- end
- instance.timing = 0
- return loadtime
- end
+ if trace_expansions then
+ for k=1,#t do
+ report_resolvers("% 4i: %s",k,t[k])
end
end
- return 0
+ return t
end
-function statistics.elapsedtime(instance)
- if not instance then
- instance = notimer
+local function validate(s)
+ local isrecursive = find(s,"//$")
+ s = collapse_path(s)
+ if isrecursive then
+ s = s .. "//"
end
- return format("%0.3f",(instance and instance.loadtime) or 0)
+ return s ~= "" and not find(s,dummy_path_expr) and s
end
-function statistics.elapsedindeed(instance)
- if not instance then
- instance = notimer
+resolvers.validated_path = validate -- keeps the trailing //
+
+function resolvers.expanded_path_from_list(pathlist) -- maybe not a list, just a path
+ -- a previous version fed back into pathlist
+ local newlist, ok = { }, false
+ for k=1,#pathlist do
+ if find(pathlist[k],"[{}]") then
+ ok = true
+ break
+ end
end
- local t = (instance and instance.loadtime) or 0
- return t > statistics.threshold
+ if ok then
+ for k=1,#pathlist do
+ splitpathexpr(pathlist[k],newlist,validate)
+ end
+ else
+ for k=1,#pathlist do
+ for p in gmatch(pathlist[k],"([^,]+)") do
+ p = validate(p)
+ if p ~= "" then newlist[#newlist+1] = p end
+ end
+ end
+ end
+ return newlist
end
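+
+-- Editor's sketch (hypothetical, not part of this changeset): the brace
+-- expansion above behaves roughly like shell-style expansion, e.g.
+--
+--   resolvers.expanded_path_from_list { "a{b,c}d" }
+--   -- roughly yields { "abd", "acd" } (entries are validated and collapsed)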
-function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds
- if statistics.elapsedindeed(instance) then
- return format("%s seconds %s", statistics.elapsedtime(instance),rest or "")
- end
+-- We also put some cleanup code here.
+
+local cleanup -- used recursively
+
+cleanup = lpeg.replacer {
+ { "!", "" },
+ { "\\", "/" },
+ { "~" , function() return lpegmatch(cleanup,environment.homedir) end },
+}
+
+function resolvers.clean_path(str)
+ return str and lpegmatch(cleanup,str)
end
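+
+-- Editor's sketch (hypothetical, not part of this changeset):
+--
+--   resolvers.clean_path([[!!c:\texmf\web2c]])  -- roughly "c:/texmf/web2c"
+--   resolvers.clean_path("~/texmf")             -- the "~" becomes the (cleaned) home directory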
--- general function
+-- This one strips quotes and funny tokens.
-function statistics.register(tag,fnc)
- if statistics.enable and type(fnc) == "function" then
- local rt = registered[tag] or (#statusinfo + 1)
- statusinfo[rt] = { tag, fnc }
- registered[tag] = rt
- if #tag > n then n = #tag end
- end
+
+local expandhome = lpegP("~") / "$HOME" -- environment.homedir
+
+local dodouble = lpegP('"')/"" * (expandhome + (1 - lpegP('"')))^0 * lpegP('"')/""
+local dosingle = lpegP("'")/"" * (expandhome + (1 - lpegP("'")))^0 * lpegP("'")/""
+local dostring = (expandhome + 1 )^0
+
+local stripper = lpegCs(
+ lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
+)
+
+function resolvers.checked_variable(str) -- assumes str is a string
+ return lpegmatch(stripper,str) or str
end
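+
+-- Editor's sketch (hypothetical, not part of this changeset):
+--
+--   resolvers.checked_variable([["~/texmf"]])  -- roughly "$HOME/texmf" (quotes stripped, ~ expanded)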
-function statistics.show(reporter)
- if statistics.enable then
- if not reporter then reporter = function(tag,data,n) texio.write_nl(tag .. " " .. data) end end
- -- this code will move
- local register = statistics.register
- register("luatex banner", function()
- return string.lower(status.banner)
- end)
- register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
- end)
- register("callbacks", function()
- local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
- return format("direct: %s, indirect: %s, total: %s", total-indirect, indirect, total)
- end)
- register("current memory usage", statistics.memused)
- register("runtime",statistics.runtime)
--- --
- for i=1,#statusinfo do
- local s = statusinfo[i]
- local r = s[2]()
- if r then
- reporter(s[1],r,n)
+-- The path splitter:
+
+-- A config (optionally) has its paths split into tables. Internally
+-- we join them and split them again after the expansion has taken place,
+-- which is more convenient.
+
+
+local cache = { }
+
+local splitter = lpegCt(lpeg.splitat(lpegS(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+
+local function split_configuration_path(str) -- beware, this can be either a path or a { specification }
+ if str then
+ local found = cache[str]
+ if not found then
+ if str == "" then
+ found = { }
+ else
+ str = gsub(str,"\\","/")
+ local split = lpegmatch(splitter,str)
+ found = { }
+ for i=1,#split do
+ local s = split[i]
+ if not find(s,"^{*unset}*") then
+ found[#found+1] = s
+ end
+ end
+ if trace_expansions then
+ report_resolvers("splitting path specification '%s'",str)
+ for k=1,#found do
+ report_resolvers("% 4i: %s",k,found[k])
+ end
+ end
+ cache[str] = found
end
end
- texio.write_nl("") -- final newline
- statistics.enable = false
+ return found
end
end
-function statistics.show_job_stat(tag,data,n)
- texio.write_nl(format("%-15s: %s - %s","mkiv lua stats",tag:rpadd(n," "),data))
-end
-
-function statistics.memused() -- no math.round yet -)
- local round = math.round or math.floor
- return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
-end
+resolvers.split_configuration_path = split_configuration_path
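+
+-- Editor's sketch (hypothetical, not part of this changeset):
+--
+--   resolvers.split_configuration_path(".;$TEXMF/tex//;!!$TEXMFLOCAL")
+--   -- roughly { ".", "$TEXMF/tex//", "!!$TEXMFLOCAL" }; backslashes are
+--   -- normalized and entries matching "unset" are dropped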
-if statistics.runtime then
- -- already loaded and set
-elseif luatex and luatex.starttime then
- statistics.starttime = luatex.starttime
- statistics.loadtime = 0
- statistics.timing = 0
-else
- statistics.starttiming(statistics)
+function resolvers.split_path(str)
+ if type(str) == 'table' then
+ return str
+ else
+ return split_configuration_path(str)
+ end
end
-function statistics.runtime()
- statistics.stoptiming(statistics)
- return statistics.formatruntime(statistics.elapsedtime(statistics))
+function resolvers.join_path(str)
+ if type(str) == 'table' then
+ return file.join_path(str)
+ else
+ return str
+ end
end
-function statistics.formatruntime(runtime)
- return format("%s seconds", statistics.elapsedtime(statistics))
-end
+-- The next function scans directories and returns a hash where the
+-- entries are either strings or tables.
-function statistics.timed(action,report)
- local timer = { }
- report = report or logs.simple
- statistics.starttiming(timer)
- action()
- statistics.stoptiming(timer)
- report("total runtime: %s",statistics.elapsedtime(timer))
-end
+-- skip names starting with . or .. etc, or containing funny characters
--- where, not really the best spot for this:
-commands = commands or { }
-local timer
-function commands.resettimer()
- statistics.resettiming(timer)
- statistics.starttiming(timer)
-end
+local weird = lpegP(".")^1 + lpeg.anywhere(lpegS("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-function commands.elapsedtime()
- statistics.stoptiming(timer)
- tex.sprint(statistics.elapsedtime(timer))
+function resolvers.scan_files(specification)
+ if trace_locating then
+ report_resolvers("scanning path '%s'",specification)
+ end
+ local attributes, directory = lfs.attributes, lfs.dir
+ local files = { __path__ = specification }
+ local n, m, r = 0, 0, 0
+ local function scan(spec,path)
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if type(f) == 'string' then
+ files[name] = { f, path }
+ else
+ f[#f+1] = path
+ end
+ else -- probably unique anyway
+ files[name] = path
+ local lowered = lower(name)
+ if name ~= lowered then
+ files["remap:"..lowered] = name
+ r = r + 1
+ end
+ end
+ elseif mode == 'directory' then
+ m = m + 1
+ if path ~= "" then
+ dirs[#dirs+1] = path..'/'..name
+ else
+ dirs[#dirs+1] = name
+ end
+ end
+ end
+ end
+ if #dirs > 0 then
+ sort(dirs)
+ for i=1,#dirs do
+ scan(spec,dirs[i])
+ end
+ end
+ end
+ scan(specification .. '/',"")
+ files.__files__, files.__directories__, files.__remappings__ = n, m, r
+ if trace_locating then
+ report_resolvers("%s files found on %s directories with %s uppercase remappings",n,m,r)
+ end
+ return files
end
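+
+-- Editor's sketch (hypothetical, not part of this changeset): the returned
+-- table maps each filename to the relative path it was found on (or to a
+-- table of paths when the name occurs more than once), adds "remap:" entries
+-- for mixed case names, and stores counters in __files__, __directories__
+-- and __remappings__, e.g.
+--
+--   local files = resolvers.scan_files("texmf-local")  -- files.__path__ == "texmf-local"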
-commands.resettimer()
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-log'] = {
+if not modules then modules = { } end modules ['data-env'] = {
version = 1.001,
- comment = "companion to trac-log.mkiv",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
--- this is old code that needs an overhaul
+local formats = { } resolvers.formats = formats
+local suffixes = { } resolvers.suffixes = suffixes
+local dangerous = { } resolvers.dangerous = dangerous
+local suffixmap = { } resolvers.suffixmap = suffixmap
+local alternatives = { } resolvers.alternatives = alternatives
+
+formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' }
+formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' }
+formats['fmt'] = 'TEXFORMATS' suffixes['fmt'] = { 'fmt' }
+formats['map'] = 'TEXFONTMAPS' suffixes['map'] = { 'map' }
+formats['mp'] = 'MPINPUTS' suffixes['mp'] = { 'mp' }
+formats['ocp'] = 'OCPINPUTS' suffixes['ocp'] = { 'ocp' }
+formats['ofm'] = 'OFMFONTS' suffixes['ofm'] = { 'ofm', 'tfm' }
+formats['otf'] = 'OPENTYPEFONTS' suffixes['otf'] = { 'otf' }
+formats['opl'] = 'OPLFONTS' suffixes['opl'] = { 'opl' }
+formats['otp'] = 'OTPINPUTS' suffixes['otp'] = { 'otp' }
+formats['ovf'] = 'OVFFONTS' suffixes['ovf'] = { 'ovf', 'vf' }
+formats['ovp'] = 'OVPFONTS' suffixes['ovp'] = { 'ovp' }
+formats['tex'] = 'TEXINPUTS' suffixes['tex'] = { 'tex' }
+formats['tfm'] = 'TFMFONTS' suffixes['tfm'] = { 'tfm' }
+formats['ttf'] = 'TTFONTS' suffixes['ttf'] = { 'ttf', 'ttc', 'dfont' }
+formats['pfb'] = 'T1FONTS' suffixes['pfb'] = { 'pfb', 'pfa' }
+formats['vf'] = 'VFFONTS' suffixes['vf'] = { 'vf' }
+formats['fea'] = 'FONTFEATURES' suffixes['fea'] = { 'fea' }
+formats['cid'] = 'FONTCIDMAPS' suffixes['cid'] = { 'cid', 'cidmap' }
+formats['texmfscripts'] = 'TEXMFSCRIPTS' suffixes['texmfscripts'] = { 'rb', 'pl', 'py' }
+formats['lua'] = 'LUAINPUTS' suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' }
+formats['lib'] = 'CLUAINPUTS' suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' }
---~ io.stdout:setvbuf("no")
---~ io.stderr:setvbuf("no")
-
-local write_nl, write = texio.write_nl or print, texio.write or io.write
-local format, gmatch = string.format, string.gmatch
-local texcount = tex and tex.count
+-- backward compatible ones
-if texlua then
- write_nl = print
- write = io.write
-end
+alternatives['map files'] = 'map'
+alternatives['enc files'] = 'enc'
+alternatives['cid maps'] = 'cid' -- great, why no cid files
+alternatives['font feature files'] = 'fea' -- and fea files here
+alternatives['opentype fonts'] = 'otf'
+alternatives['truetype fonts'] = 'ttf'
+alternatives['truetype collections'] = 'ttc'
+alternatives['truetype dictionary'] = 'dfont'
+alternatives['type1 fonts'] = 'pfb'
--[[ldx--
-<p>This is a prelude to a more extensive logging module. For the sake
-of parsing log files, in addition to the standard logging we will
-provide an <l n='xml'/> structured file. Actually, any logging that
-is hooked into callbacks will be \XML\ by default.</p>
+<p>If you wondered about some of the previous mappings, how about
+the next bunch:</p>
--ldx]]--
-logs = logs or { }
-logs.xml = logs.xml or { }
-logs.tex = logs.tex or { }
+-- kpse-specific ones (a few omitted); we only add them for locating
+-- files that we don't use anyway
+
+formats['base'] = 'MFBASES' suffixes['base'] = { 'base', 'bas' }
+formats['bib'] = '' suffixes['bib'] = { 'bib' }
+formats['bitmap font'] = '' suffixes['bitmap font'] = { }
+formats['bst'] = '' suffixes['bst'] = { 'bst' }
+formats['cmap files'] = 'CMAPFONTS' suffixes['cmap files'] = { 'cmap' }
+formats['cnf'] = '' suffixes['cnf'] = { 'cnf' }
+formats['cweb'] = '' suffixes['cweb'] = { 'w', 'web', 'ch' }
+formats['dvips config'] = '' suffixes['dvips config'] = { }
+formats['gf'] = '' suffixes['gf'] = { '<resolution>gf' }
+formats['graphic/figure'] = '' suffixes['graphic/figure'] = { 'eps', 'epsi' }
+formats['ist'] = '' suffixes['ist'] = { 'ist' }
+formats['lig files'] = 'LIGFONTS' suffixes['lig files'] = { 'lig' }
+formats['ls-R'] = '' suffixes['ls-R'] = { }
+formats['mem'] = 'MPMEMS' suffixes['mem'] = { 'mem' }
+formats['MetaPost support'] = '' suffixes['MetaPost support'] = { }
+formats['mf'] = 'MFINPUTS' suffixes['mf'] = { 'mf' }
+formats['mft'] = '' suffixes['mft'] = { 'mft' }
+formats['misc fonts'] = '' suffixes['misc fonts'] = { }
+formats['other text files'] = '' suffixes['other text files'] = { }
+formats['other binary files'] = '' suffixes['other binary files'] = { }
+formats['pdftex config'] = 'PDFTEXCONFIG' suffixes['pdftex config'] = { }
+formats['pk'] = '' suffixes['pk'] = { '<resolution>pk' }
+formats['PostScript header'] = 'TEXPSHEADERS' suffixes['PostScript header'] = { 'pro' }
+formats['sfd'] = 'SFDFONTS' suffixes['sfd'] = { 'sfd' }
+formats['TeX system documentation'] = '' suffixes['TeX system documentation'] = { }
+formats['TeX system sources'] = '' suffixes['TeX system sources'] = { }
+formats['Troff fonts'] = '' suffixes['Troff fonts'] = { }
+formats['type42 fonts'] = 'T42FONTS' suffixes['type42 fonts'] = { }
+formats['web'] = '' suffixes['web'] = { 'web', 'ch' }
+formats['web2c files'] = 'WEB2C' suffixes['web2c files'] = { }
+formats['fontconfig files'] = 'FONTCONFIG_PATH' suffixes['fontconfig files'] = { } -- not unique
---[[ldx--
-<p>This looks pretty ugly but we need to speed things up a bit.</p>
---ldx]]--
+alternatives['subfont definition files'] = 'sfd'
-logs.moreinfo = [[
-more information about ConTeXt and the tools that come with it can be found at:
+-- A few accessors, mostly for the command line tools.
-maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
-wiki : http://contextgarden.net
-]]
+function resolvers.suffix_of_format(str)
+ local s = suffixes[str]
+ return s and s[1] or ""
+end
-logs.levels = {
- ['error'] = 1,
- ['warning'] = 2,
- ['info'] = 3,
- ['debug'] = 4,
-}
+function resolvers.suffixes_of_format(str)
+ return suffixes[str] or { }
+end
-logs.functions = {
- 'report', 'start', 'stop', 'push', 'pop', 'line', 'direct',
- 'start_run', 'stop_run',
- 'start_page_number', 'stop_page_number',
- 'report_output_pages', 'report_output_log',
- 'report_tex_stat', 'report_job_stat',
- 'show_open', 'show_close', 'show_load',
-}
+-- As we don't register additional suffixes anyway, we might as well
+-- freeze the reverse map here.
-logs.tracers = {
-}
+for name, suffixlist in next, suffixes do
+ for i=1,#suffixlist do
+ suffixmap[suffixlist[i]] = name
+ end
+end
-logs.level = 0
-logs.mode = string.lower((os.getenv("MTX.LOG.MODE") or os.getenv("MTX_LOG_MODE") or "tex"))
+setmetatable(suffixes, { __newindex = function(suffixes,name,suffixlist)
+ rawset(suffixes,name,suffixlist)
+ suffixes[name] = suffixlist
+ for i=1,#suffixlist do
+ suffixmap[suffixlist[i]] = name
+ end
+end } )
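+
+-- Editor's sketch (hypothetical, not part of this changeset): thanks to the
+-- metatable above, a later registration like
+--
+--   suffixes['xyz'] = { 'xyz' }
+--
+-- would also set suffixmap['xyz'] = 'xyz' automatically.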
-function logs.set_level(level)
- logs.level = logs.levels[level] or level
+for name, format in next, formats do
+ dangerous[name] = true
end
-function logs.set_method(method)
- for _, v in next, logs.functions do
- logs[v] = logs[method][v] or function() end
- end
+-- because vf searching is somewhat dangerous, we want to prevent
+-- too liberal searching, especially because we do a lookup on the
+-- current path anyway; only tex (or any) is safe
+
+dangerous.tex = nil
+
+
+-- more helpers
+
+function resolvers.format_of_var(str)
+ return formats[str] or formats[alternatives[str]] or ''
end
--- tex logging
+function resolvers.format_of_suffix(str) -- of file
+ return suffixmap[file.extname(str)] or 'tex'
+end
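+
+-- Editor's sketch (hypothetical, not part of this changeset):
+--
+--   resolvers.suffix_of_format('lua')           -- "lua" (first of 'lua','luc','tma','tmc')
+--   resolvers.format_of_suffix("texnansi.map")  -- "map"; unknown suffixes fall back to 'tex'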
-function logs.tex.report(category,fmt,...) -- new
- if fmt then
- write_nl(category .. " | " .. format(fmt,...))
- else
- write_nl(category .. " |")
- end
+function resolvers.variable_of_format(str)
+ return formats[str] or formats[alternatives[str]] or ''
end
-function logs.tex.line(fmt,...) -- new
- if fmt then
- write_nl(format(fmt,...))
- else
- write_nl("")
+function resolvers.var_of_format_or_suffix(str)
+ local v = formats[str]
+ if v then
+ return v
+ end
+ v = formats[alternatives[str]]
+ if v then
+ return v
end
+ v = suffixmap[file.extname(str)]
+ if v then
+ return formats[v]
+ end
+ return ''
end
---~ function logs.tex.start_page_number()
---~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
---~ if real > 0 then
---~ if user > 0 then
---~ if sub > 0 then
---~ write(format("[%s.%s.%s",real,user,sub))
---~ else
---~ write(format("[%s.%s",real,user))
---~ end
---~ else
---~ write(format("[%s",real))
---~ end
---~ else
---~ write("[-")
---~ end
---~ end
-
---~ function logs.tex.stop_page_number()
---~ write("]")
---~ end
-local real, user, sub
-function logs.tex.start_page_number()
- real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
-end
+end -- of closure
-function logs.tex.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub)
- else
- logs.report("pages", "flushing realpage %s, userpage %s",real,user)
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-tmp'] = {
+ version = 1.100,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+<p>This module deals with caching data. It sets up the paths and
+implements loaders and savers for tables. It is best to set the
+following variable; when it is not set, the usual paths will be
+checked. Personally I prefer the user's temporary path.</p>
+
+<code>
+TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
+</code>
+
+<p>Currently we do no locking when we write files. This is no real
+problem because most caching involves fonts and the chance of them
+being written at the same time is small. We also need to extend
+luatools with a recache feature.</p>
+--ldx]]--
+
+local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local mkdirs, isdir = dir.mkdirs, lfs.isdir
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
+
+local report_cache = logs.new("cache")
+
+local report_resolvers = logs.new("resolvers")
+
+caches = caches or { }
+
+caches.base = caches.base or "luatex-cache"
+caches.more = caches.more or "context"
+caches.direct = false -- true is faster but may need huge amounts of memory
+caches.tree = false
+caches.force = true
+caches.ask = false
+caches.defaults = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
+
+local writable, readables, usedreadables = nil, { }, { }
+
+-- we could use a metatable for writable and readable but not yet
+
+local function identify()
+ -- Combining the loops makes it messy. First we check the format cache path
+ -- and when the last component is not present we try to create it.
+ local texmfcaches = resolvers.clean_path_list("TEXMFCACHE")
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath = texmfcaches[k]
+ if cachepath ~= "" then
+ cachepath = resolvers.clean_path(cachepath)
+ cachepath = file.collapse_path(cachepath)
+ local valid = isdir(cachepath)
+ if valid then
+ if file.isreadable(cachepath) then
+ readables[#readables+1] = cachepath
+ if not writable and file.iswritable(cachepath) then
+ writable = cachepath
+ end
+ end
+ elseif not writable and caches.force then
+ local cacheparent = file.dirname(cachepath)
+ if file.iswritable(cacheparent) then
+ if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
+ mkdirs(cachepath)
+ if isdir(cachepath) and file.iswritable(cachepath) then
+ report_cache("created: %s",cachepath)
+ writable = cachepath
+ readables[#readables+1] = cachepath
+ end
+ end
+ end
+ end
end
- else
- logs.report("pages", "flushing realpage %s",real)
+ end
+ end
+ -- As a last resort we check some temporary paths but this time we don't
+ -- create them.
+ local texmfcaches = caches.defaults
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath = texmfcaches[k]
+ cachepath = resolvers.getenv(cachepath)
+ if cachepath ~= "" then
+ cachepath = resolvers.clean_path(cachepath)
+ local valid = isdir(cachepath)
+ if valid and file.isreadable(cachepath) then
+ if not writable and file.iswritable(cachepath) then
+ readables[#readables+1] = cachepath
+ writable = cachepath
+ break
+ end
+ end
+ end
+ end
+ end
+ -- Some extra checking. If we have no writable or readable path then we simply
+ -- quit.
+ if not writable then
+ report_cache("fatal error: there is no valid writable cache path defined")
+ os.exit()
+ elseif #readables == 0 then
+ report_cache("fatal error: there is no valid readable cache path defined")
+ os.exit()
+ end
+ -- why here
+ writable = dir.expand_name(resolvers.clean_path(writable)) -- just in case
+ -- moved here
+ local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree
+ if tree then
+ caches.tree = tree
+ writable = mkdirs(writable,base,more,tree)
+ for i=1,#readables do
+ readables[i] = file.join(readables[i],base,more,tree)
end
else
- logs.report("pages", "flushing page")
+ writable = mkdirs(writable,base,more)
+ for i=1,#readables do
+ readables[i] = file.join(readables[i],base,more)
+ end
end
- io.flush()
+ -- end
+ if trace_cache then
+ for i=1,#readables do
+ report_cache("using readable path '%s' (order %s)",readables[i],i)
+ end
+ report_cache("using writable path '%s'",writable)
+ end
+ identify = function()
+ return writable, readables
+ end
+ return writable, readables
end
-logs.tex.report_job_stat = statistics.show_job_stat
-
--- xml logging
-
-function logs.xml.report(category,fmt,...) -- new
- if fmt then
- write_nl(format("<r category='%s'>%s</r>",category,format(fmt,...)))
+function caches.usedpaths()
+ local writable, readables = identify()
+ if #readables > 1 then
+ local result = { }
+ for i=1,#readables do
+ local readable = readables[i]
+ if usedreadables[i] or readable == writable then
+ result[#result+1] = format("readable: '%s' (order %s)",readable,i)
+ end
+ end
+ result[#result+1] = format("writable: '%s'",writable)
+ return result
else
- write_nl(format("<r category='%s'/>",category))
+ return writable
end
end
-function logs.xml.line(fmt,...) -- new
- if fmt then
- write_nl(format("<r>%s</r>",format(fmt,...)))
+
+function caches.configfiles()
+ return table.concat(resolvers.instance.specification,";")
+end
+
+function caches.hashed(tree)
+ return md5.hex(gsub(lower(tree),"[\\\/]+","/"))
+end
+
+function caches.treehash()
+ local tree = caches.configfiles()
+ if not tree or tree == "" then
+ return false
else
- write_nl("<r/>")
+ return caches.hashed(tree)
end
end
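+
+-- Editor's sketch (hypothetical, not part of this changeset): the hash is
+-- taken over the lowercased, slash-normalized specification, e.g.
+--
+--   caches.hashed([[C:\TEXLIVE\TEXMF-LOCAL]])  -- md5 hex of "c:/texlive/texmf-local"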
-function logs.xml.start() if logs.level > 0 then tw("<%s>" ) end end
-function logs.xml.stop () if logs.level > 0 then tw("</%s>") end end
-function logs.xml.push () if logs.level > 0 then tw("<!-- ") end end
-function logs.xml.pop () if logs.level > 0 then tw(" -->" ) end end
+local r_cache, w_cache = { }, { } -- normally w is in r but who cares
-function logs.xml.start_run()
- write_nl("<?xml version='1.0' standalone='yes'?>")
- write_nl("<job>") -- xmlns='www.pragma-ade.com/luatex/schemas/context-job.rng'
- write_nl("")
+local function getreadablepaths(...) -- we can optimize this as we have at most 2 tags
+ local tags = { ... }
+ local hash = concat(tags,"/")
+ local done = r_cache[hash]
+ if not done then
+ local writable, readables = identify() -- exit if not found
+ if #tags > 0 then
+ done = { }
+ for i=1,#readables do
+ done[i] = file.join(readables[i],...)
+ end
+ else
+ done = readables
+ end
+ r_cache[hash] = done
+ end
+ return done
end
-function logs.xml.stop_run()
- write_nl("</job>")
+local function getwritablepath(...)
+ local tags = { ... }
+ local hash = concat(tags,"/")
+ local done = w_cache[hash]
+ if not done then
+ local writable, readables = identify() -- exit if not found
+ if #tags > 0 then
+ done = mkdirs(writable,...)
+ else
+ done = writable
+ end
+ w_cache[hash] = done
+ end
+ return done
end
-function logs.xml.start_page_number()
- write_nl(format("<p real='%s' page='%s' sub='%s'", texcount.realpageno, texcount.userpageno, texcount.subpageno))
-end
+caches.getreadablepaths = getreadablepaths
+caches.getwritablepath = getwritablepath
-function logs.xml.stop_page_number()
- write("/>")
- write_nl("")
+function caches.getfirstreadablefile(filename,...)
+ local rd = getreadablepaths(...)
+ for i=1,#rd do
+ local path = rd[i]
+ local fullname = file.join(path,filename)
+ if file.isreadable(fullname) then
+ usedreadables[i] = true
+ return fullname, path
+ end
+ end
+ return caches.setfirstwritablefile(filename,...)
end
-function logs.xml.report_output_pages(p,b)
- write_nl(format("<v k='pages' v='%s'/>", p))
- write_nl(format("<v k='bytes' v='%s'/>", b))
- write_nl("")
+function caches.setfirstwritablefile(filename,...)
+ local wr = getwritablepath(...)
+ local fullname = file.join(wr,filename)
+ return fullname, wr
end
-function logs.xml.report_output_log()
+function caches.define(category,subcategory) -- for old times' sake
+ return function()
+ return getwritablepath(category,subcategory)
+ end
end
-function logs.xml.report_tex_stat(k,v)
- texiowrite_nl("log","<v k='"..k.."'>"..tostring(v).."</v>")
+function caches.setluanames(path,name)
+ return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
end
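+
+-- Editor's sketch (hypothetical names, not part of this changeset):
+--
+--   caches.setluanames("/tmp/luatex-cache/context/abc","somename")
+--   -- returns "/tmp/luatex-cache/context/abc/somename.tma",
+--   --         "/tmp/luatex-cache/context/abc/somename.tmc"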
-local level = 0
-
-function logs.xml.show_open(name)
- level = level + 1
- texiowrite_nl(format("<f l='%s' n='%s'>",level,name))
+function caches.loaddata(readables,name)
+ if type(readables) == "string" then
+ readables = { readables }
+ end
+ for i=1,#readables do
+ local path = readables[i]
+ local tmaname, tmcname = caches.setluanames(path,name)
+ local loader = loadfile(tmcname) or loadfile(tmaname)
+ if loader then
+ loader = loader()
+ collectgarbage("step")
+ return loader
+ end
+ end
+ return false
end
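+
+-- Editor's sketch (hypothetical tags, not part of this changeset):
+--
+--   local data = caches.loaddata(caches.getreadablepaths("curl","cache"),"somehash")
+--   -- tries somehash.tmc first, then somehash.tma, on each readable path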
-function logs.xml.show_close(name)
- texiowrite("</f> ")
- level = level - 1
+function caches.iswritable(filepath,filename)
+ local tmaname, tmcname = caches.setluanames(filepath,filename)
+ return file.iswritable(tmaname)
end
-function logs.xml.show_load(name)
- texiowrite_nl(format("<f l='%s' n='%s'/>",level+1,name))
+function caches.savedata(filepath,filename,data,raw)
+ local tmaname, tmcname = caches.setluanames(filepath,filename)
+ local reduce, simplify = true, true
+ if raw then
+ reduce, simplify = false, false
+ end
+ data.cache_uuid = os.uuid()
+ if caches.direct then
+ file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex
+ else
+ table.tofile(tmaname, data,'return',false,true,false) -- maybe not the last true
+ end
+ local cleanup = resolvers.boolean_variable("PURGECACHE", false)
+ local strip = resolvers.boolean_variable("LUACSTRIP", true)
+ utils.lua.compile(tmaname, tmcname, cleanup, strip)
end
---
+-- moved from data-res:
-local name, banner = 'report', 'context'
+local content_state = { }
-local function report(category,fmt,...)
- if fmt then
- write_nl(format("%s | %s: %s",name,category,format(fmt,...)))
- elseif category then
- write_nl(format("%s | %s",name,category))
- else
- write_nl(format("%s |",name))
- end
+function caches.contentstate()
+ return content_state or { }
end
-local function simple(fmt,...)
- if fmt then
- write_nl(format("%s | %s",name,format(fmt,...)))
- else
- write_nl(format("%s |",name))
+function caches.loadcontent(cachename,dataname)
+ local name = caches.hashed(cachename)
+ local full, path = caches.getfirstreadablefile(name ..".lua","trees")
+ local filename = file.join(path,name)
+ local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
+ if blob then
+ local data = blob()
+ if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then
+ content_state[#content_state+1] = data.uuid
+ if trace_locating then
+ report_resolvers("loading '%s' for '%s' from '%s'",dataname,cachename,filename)
+ end
+ return data.content
+ elseif trace_locating then
+ report_resolvers("skipping '%s' for '%s' from '%s'",dataname,cachename,filename)
+ end
+ elseif trace_locating then
+ report_resolvers("skipping '%s' for '%s' from '%s'",dataname,cachename,filename)
end
end
-function logs.setprogram(_name_,_banner_,_verbose_)
- name, banner = _name_, _banner_
- if _verbose_ then
- trackers.enable("resolvers.locating")
- end
- logs.set_method("tex")
- logs.report = report -- also used in libraries
- logs.simple = simple -- only used in scripts !
- if utils then
- utils.report = simple
+function caches.collapsecontent(content)
+ for k, v in next, content do
+ if type(v) == "table" and #v == 1 then
+ content[k] = v[1]
+ end
end
- logs.verbose = _verbose_
end
-function logs.setverbose(what)
- if what then
- trackers.enable("resolvers.locating")
- else
- trackers.disable("resolvers.locating")
+function caches.savecontent(cachename,dataname,content)
+ local name = caches.hashed(cachename)
+ local full, path = caches.setfirstwritablefile(name ..".lua","trees")
+ local filename = file.join(path,name) -- is full
+ local luaname, lucname = filename .. ".lua", filename .. ".luc"
+ if trace_locating then
+ report_resolvers("preparing '%s' for '%s'",dataname,cachename)
+ end
+ local data = {
+ type = dataname,
+ root = cachename,
+ version = resolvers.cacheversion,
+ date = os.date("%Y-%m-%d"),
+ time = os.date("%H:%M:%S"),
+ content = content,
+ uuid = os.uuid(),
+ }
+ local ok = io.savedata(luaname,table.serialize(data,true))
+ if ok then
+ if trace_locating then
+ report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
+ end
+ if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip
+ if trace_locating then
+ report_resolvers("'%s' compiled to '%s'",dataname,lucname)
+ end
+ return true
+ else
+ if trace_locating then
+ report_resolvers("compiling failed for '%s', deleting file '%s'",dataname,lucname)
+ end
+ os.remove(lucname)
+ end
+ elseif trace_locating then
+ report_resolvers("unable to save '%s' in '%s' (access error)",dataname,luaname)
end
- logs.verbose = what or false
end
-function logs.extendbanner(_banner_,_verbose_)
- banner = banner .. " | ".. _banner_
- if _verbose_ ~= nil then
- logs.setverbose(what)
- end
-end
-logs.verbose = false
-logs.report = logs.tex.report
-logs.simple = logs.tex.report
-function logs.reportlines(str) -- todo: <lines></lines>
- for line in gmatch(str,"(.-)[\n\r]") do
- logs.report(line)
- end
-end
-function logs.reportline() -- for scripts too
- logs.report()
-end
+end -- of closure
-logs.simpleline = logs.reportline
+do -- create closure to overcome 200 locals limit
-function logs.reportbanner() -- for scripts too
- logs.report(banner)
-end
+if not modules then modules = { } end modules ['data-met'] = {
+ version = 1.100,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
-function logs.help(message,option)
- logs.reportbanner()
- logs.reportline()
- logs.reportlines(message)
- local moreinfo = logs.moreinfo or ""
- if moreinfo ~= "" and option ~= "nomoreinfo" then
- logs.reportline()
- logs.reportlines(moreinfo)
+local find = string.find
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+resolvers.locators = { notfound = { nil } } -- locate databases
+resolvers.hashers = { notfound = { nil } } -- load databases
+resolvers.generators = { notfound = { nil } } -- generate databases
+
+function resolvers.splitmethod(filename)
+ if not filename then
+ return { } -- safeguard
+ elseif type(filename) == "table" then
+ return filename -- already split
+ elseif not find(filename,"://") then
+ return { scheme="file", path = filename, original = filename } -- quick hack
+ else
+ return url.hashed(filename)
end
end
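+
+-- Editor's sketch (hypothetical, not part of this changeset):
+--
+--   resolvers.splitmethod("oeps.tex")          -- { scheme = "file", path = "oeps.tex", original = "oeps.tex" }
+--   resolvers.splitmethod("zip:///a.zip?b=c")  -- a url.hashed table (scheme "zip")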
-logs.set_level('error')
-logs.set_method('tex')
-
-function logs.system(whereto,process,jobname,category,...)
- for i=1,10 do
- local f = io.open(whereto,"a")
- if f then
- f:write(format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...)))
- f:close()
- break
- else
- sleep(0.1)
+function resolvers.methodhandler(what, filename, filetype) -- ...
+ filename = file.collapse_path(filename)
+ local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
+ local scheme = specification.scheme
+ local resolver = resolvers[what]
+ if resolver[scheme] then
+ if trace_locating then
+ report_resolvers("handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
end
+ return resolver[scheme](filename,filetype)
+ else
+ return resolver.tex(filename,filetype) -- todo: specification
end
end
---~ local syslogname = "oeps.xxx"
---~
---~ for i=1,10 do
---~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
---~ end
-
-function logs.fatal(where,...)
- logs.report(where,"fatal error: %s, aborting now",format(...))
- os.exit()
-end
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-inp'] = {
+if not modules then modules = { } end modules ['data-res'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -8111,70 +9499,45 @@ if not modules then modules = { } end modules ['data-inp'] = {
license = "see context related readme files",
}
--- After a few years using the code the large luat-inp.lua file
--- has been split up a bit. In the process some functionality was
--- dropped:
---
--- * support for reading lsr files
--- * selective scanning (subtrees)
--- * some public auxiliary functions were made private
---
--- TODO: os.getenv -> os.env[]
--- TODO: instances.[hashes,cnffiles,configurations,522]
--- TODO: check escaping in find etc, too much, too slow
-
--- This lib is multi-purpose and can be loaded again later on so that
--- additional functionality becomes available. We will split thislogs.report("fileio",
--- module in components once we're done with prototyping. This is the
--- first code I wrote for LuaTeX, so it needs some cleanup. Before changing
--- something in this module one can best check with Taco or Hans first; there
--- is some nasty trickery going on that relates to traditional kpse support.
-
--- To be considered: hash key lowercase, first entry in table filename
--- (any case), rest paths (so no need for optimization). Or maybe a
--- separate table that matches lowercase names to mixed case when
--- present. In that case the lower() cases can go away. I will do that
--- only when we run into problems with names ... well ... Iwona-Regular.
+-- In practice we will work within one tds tree, but I want to keep
+-- the option open to build tools that look at multiple trees, which is
+-- why we keep the tree-specific data in a table. We used to pass the
+-- instance, but for practical purposes we now avoid this and use an
+-- instance variable. We always have one instance active (sort of global).
--- Beware, loading and saving is overloaded in luat-tmp!
+-- todo: cache:/// home:///
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
local next, type = next, type
-local lpegmatch = lpeg.match
-local trace_locating, trace_detail, trace_expansions = false, false, false
+local lpegP, lpegS, lpegR, lpegC, lpegCc, lpegCs, lpegCt = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-trackers.register("resolvers.locating", function(v) trace_locating = v end)
-trackers.register("resolvers.details", function(v) trace_detail = v end)
-trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo
+local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
+local collapse_path = file.collapse_path
-if not resolvers then
- resolvers = {
- suffixes = { },
- formats = { },
- dangerous = { },
- suffixmap = { },
- alternatives = { },
- locators = { }, -- locate databases
- hashers = { }, -- load databases
- generators = { }, -- generate databases
- }
-end
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
+
+local report_resolvers = logs.new("resolvers")
-local resolvers = resolvers
+local expanded_path_from_list = resolvers.expanded_path_from_list
+local checked_variable = resolvers.checked_variable
+local split_configuration_path = resolvers.split_configuration_path
-resolvers.locators .notfound = { nil }
-resolvers.hashers .notfound = { nil }
-resolvers.generators.notfound = { nil }
+local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.setenv, os.getenv
resolvers.cacheversion = '1.0.1'
-resolvers.cnfname = 'texmf.cnf'
-resolvers.luaname = 'texmfcnf.lua'
-resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~'
-resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}'
+resolvers.configbanner = ''
+resolvers.homedir = environment.homedir
+resolvers.criticalvars = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
+resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}' -- rubbish path
+resolvers.luacnfname = 'texmfcnf.lua'
+resolvers.luacnfstate = "unknown"
-local dummy_path_expr = "^!*unset/*$"
+local unset_variable = "unset"
local formats = resolvers.formats
local suffixes = resolvers.suffixes
@@ -8182,104 +9545,12 @@ local dangerous = resolvers.dangerous
local suffixmap = resolvers.suffixmap
local alternatives = resolvers.alternatives
-formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' }
-formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' }
-formats['fmt'] = 'TEXFORMATS' suffixes['fmt'] = { 'fmt' }
-formats['map'] = 'TEXFONTMAPS' suffixes['map'] = { 'map' }
-formats['mp'] = 'MPINPUTS' suffixes['mp'] = { 'mp' }
-formats['ocp'] = 'OCPINPUTS' suffixes['ocp'] = { 'ocp' }
-formats['ofm'] = 'OFMFONTS' suffixes['ofm'] = { 'ofm', 'tfm' }
-formats['otf'] = 'OPENTYPEFONTS' suffixes['otf'] = { 'otf' } -- 'ttf'
-formats['opl'] = 'OPLFONTS' suffixes['opl'] = { 'opl' }
-formats['otp'] = 'OTPINPUTS' suffixes['otp'] = { 'otp' }
-formats['ovf'] = 'OVFFONTS' suffixes['ovf'] = { 'ovf', 'vf' }
-formats['ovp'] = 'OVPFONTS' suffixes['ovp'] = { 'ovp' }
-formats['tex'] = 'TEXINPUTS' suffixes['tex'] = { 'tex' }
-formats['tfm'] = 'TFMFONTS' suffixes['tfm'] = { 'tfm' }
-formats['ttf'] = 'TTFONTS' suffixes['ttf'] = { 'ttf', 'ttc', 'dfont' }
-formats['pfb'] = 'T1FONTS' suffixes['pfb'] = { 'pfb', 'pfa' }
-formats['vf'] = 'VFFONTS' suffixes['vf'] = { 'vf' }
-
-formats['fea'] = 'FONTFEATURES' suffixes['fea'] = { 'fea' }
-formats['cid'] = 'FONTCIDMAPS' suffixes['cid'] = { 'cid', 'cidmap' }
-
-formats ['texmfscripts'] = 'TEXMFSCRIPTS' -- new
-suffixes['texmfscripts'] = { 'rb', 'pl', 'py' } -- 'lua'
-
-formats ['lua'] = 'LUAINPUTS' -- new
-suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' }
-
--- backward compatible ones
-
-alternatives['map files'] = 'map'
-alternatives['enc files'] = 'enc'
-alternatives['cid maps'] = 'cid' -- great, why no cid files
-alternatives['font feature files'] = 'fea' -- and fea files here
-alternatives['opentype fonts'] = 'otf'
-alternatives['truetype fonts'] = 'ttf'
-alternatives['truetype collections'] = 'ttc'
-alternatives['truetype dictionary'] = 'dfont'
-alternatives['type1 fonts'] = 'pfb'
-
--- obscure ones
-
-formats ['misc fonts'] = ''
-suffixes['misc fonts'] = { }
-
-formats ['sfd'] = 'SFDFONTS'
-suffixes ['sfd'] = { 'sfd' }
-alternatives['subfont definition files'] = 'sfd'
-
--- lib paths
-
-formats ['lib'] = 'CLUAINPUTS' -- new (needs checking)
-suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' }
-
--- In practice we will work within one tds tree, but i want to keep
--- the option open to build tools that look at multiple trees, which is
--- why we keep the tree specific data in a table. We used to pass the
--- instance but for practical pusposes we now avoid this and use a
--- instance variable.
-
--- here we catch a few new thingies (todo: add these paths to context.tmf)
---
--- FONTFEATURES = .;$TEXMF/fonts/fea//
--- FONTCIDMAPS = .;$TEXMF/fonts/cid//
-
--- we always have one instance active
-
resolvers.instance = resolvers.instance or nil -- the current one (slow access)
-local instance = resolvers.instance or nil -- the current one (fast access)
+local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.newinstance()
- -- store once, freeze and faster (once reset we can best use
- -- instance.environment) maybe better have a register suffix
- -- function
-
- for k, v in next, suffixes do
- for i=1,#v do
- local vi = v[i]
- if vi then
- suffixmap[vi] = k
- end
- end
- end
-
- -- because vf searching is somewhat dangerous, we want to prevent
- -- too liberal searching esp because we do a lookup on the current
- -- path anyway; only tex (or any) is safe
-
- for k, v in next, formats do
- dangerous[k] = true
- end
- dangerous.tex = nil
-
- -- the instance
-
local newinstance = {
- rootpath = '',
- treepath = '',
progname = 'context',
engine = 'luatex',
format = '',
@@ -8287,26 +9558,19 @@ function resolvers.newinstance()
variables = { },
expansions = { },
files = { },
- remap = { },
- configuration = { },
- setup = { },
+ setups = { },
order = { },
found = { },
foundintrees = { },
- kpsevars = { },
+ origins = { },
hashes = { },
- cnffiles = { },
- luafiles = { },
+ specification = { },
lists = { },
remember = true,
diskcache = true,
renewcache = false,
- scandisk = true,
- cachepath = nil,
loaderror = false,
- sortdata = false,
savelists = true,
- cleanuppaths = true,
allresults = false,
pattern = nil, -- lists
data = { }, -- only for loading
@@ -8316,8 +9580,8 @@ function resolvers.newinstance()
local ne = newinstance.environment
- for k,v in next, os.env do
- ne[k] = resolvers.bare_variable(v)
+ for k, v in next, osenv do
+ ne[upper(k)] = checked_variable(v)
end
return newinstance
@@ -8339,91 +9603,68 @@ local function reset_hashes()
instance.found = { }
end
-local function check_configuration() -- not yet ok, no time for debugging now
- local ie, iv = instance.environment, instance.variables
- local function fix(varname,default)
- local proname = varname .. "." .. instance.progname or "crap"
- local p, v = ie[proname], ie[varname] or iv[varname]
- if not ((p and p ~= "") or (v and v ~= "")) then
- iv[varname] = default -- or environment?
- end
- end
- local name = os.name
- if name == "windows" then
- fix("OSFONTDIR", "c:/windows/fonts//")
- elseif name == "macosx" then
- fix("OSFONTDIR", "$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
- else
- -- bad luck
- end
- fix("LUAINPUTS" , ".;$TEXINPUTS;$TEXMFSCRIPTS") -- no progname, hm
- -- this will go away some day
- fix("FONTFEATURES", ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
- fix("FONTCIDMAPS" , ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
- --
- fix("LUATEXLIBS" , ".;$TEXMF/luatex/lua//")
-end
-
-function resolvers.bare_variable(str) -- assumes str is a string
- return (gsub(str,"\s*([\"\']?)(.+)%1\s*", "%2"))
-end
-
-function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
- if n then
- trackers.disable("resolvers.*")
- trackers.enable("resolvers."..n)
+function resolvers.setenv(key,value)
+ if instance then
+ instance.environment[key] = value
+ ossetenv(key,value)
end
end
-resolvers.settrace(os.getenv("MTX_INPUT_TRACE"))
-
-function resolvers.osenv(key)
- local ie = instance.environment
- local value = ie[key]
- if value == nil then
- -- local e = os.getenv(key)
- local e = os.env[key]
- if e == nil then
- -- value = "" -- false
- else
- value = resolvers.bare_variable(e)
- end
- ie[key] = value
+function resolvers.getenv(key)
+ local value = instance.environment[key]
+ if value and value ~= "" then
+ return value
+ else
+ local e = osgetenv(key)
+ return e ~= nil and e ~= "" and checked_variable(e) or ""
end
- return value or ""
-end
-
-function resolvers.env(key)
- return instance.environment[key] or resolvers.osenv(key)
end
---
+resolvers.env = resolvers.getenv
local function expand_vars(lst) -- simple vars
- local variables, env = instance.variables, resolvers.env
+ local variables, getenv = instance.variables, resolvers.getenv
local function resolve(a)
- return variables[a] or env(a)
+ local va = variables[a] or ""
+ return (va ~= "" and va) or getenv(a) or ""
end
for k=1,#lst do
- lst[k] = gsub(lst[k],"%$([%a%d%_%-]+)",resolve)
+ local var = lst[k]
+ var = gsub(var,"%$([%a%d%_%-]+)",resolve)
+ var = gsub(var,";+",";")
+ var = gsub(var,";[!{}/\\]+;",";")
+ lst[k] = var
end
end
-local function expanded_var(var) -- simple vars
- local function resolve(a)
- return instance.variables[a] or resolvers.env(a)
+local function resolve(key)
+ local value = instance.variables[key]
+ if value and value ~= "" then
+ return value
+ end
+ local value = instance.environment[key]
+ if value and value ~= "" then
+ return value
end
- return (gsub(var,"%$([%a%d%_%-]+)",resolve))
+ local e = osgetenv(key)
+ return e ~= nil and e ~= "" and checked_variable(e) or ""
+end
+
+local function expanded_var(var) -- simple vars
+ var = gsub(var,"%$([%a%d%_%-]+)",resolve)
+ var = gsub(var,";+",";")
+ var = gsub(var,";[!{}/\\]+;",";")
+ return var
end
local function entry(entries,name)
- if name and (name ~= "") then
+ if name and name ~= "" then
name = gsub(name,'%$','')
local result = entries[name..'.'..instance.progname] or entries[name]
if result then
return result
else
- result = resolvers.env(name)
+ result = resolvers.getenv(name)
if result then
instance.variables[name] = result
resolvers.expand_variables()
@@ -8443,438 +9684,147 @@ local function is_entry(entries,name)
end
end
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
--- this one is better and faster, but it took me a while to realize
--- that this kind of replacement is cleaner than messy parsing and
--- fuzzy concatenating we can probably gain a bit with selectively
--- applying lpeg, but experiments with lpeg parsing this proved not to
--- work that well; the parsing is ok, but dealing with the resulting
--- table is a pain because we need to work inside-out recursively
-
-local function do_first(a,b)
- local t = { }
- for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_second(a,b)
- local t = { }
- for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_both(a,b)
- local t = { }
- for sa in gmatch(a,"[^,]+") do
- for sb in gmatch(b,"[^,]+") do
- t[#t+1] = sa .. sb
- end
- end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_three(a,b,c)
- return a .. b.. c
-end
-
-local function splitpathexpr(str, t, validate)
- -- no need for further optimization as it is only called a
- -- few times, we can use lpeg for the sub
- if trace_expansions then
- logs.report("fileio","expanding variable '%s'",str)
- end
- t = t or { }
- str = gsub(str,",}",",@}")
- str = gsub(str,"{,","{@,")
- -- str = "@" .. str .. "@"
- local ok, done
- while true do
- done = false
- while true do
- str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
- if ok > 0 then done = true else break end
- end
- str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
- if ok > 0 then done = true end
- if not done then break end
- end
- str = gsub(str,"[{}]", "")
- str = gsub(str,"@","")
- if validate then
- for s in gmatch(str,"[^,]+") do
- s = validate(s)
- if s then t[#t+1] = s end
- end
- else
- for s in gmatch(str,"[^,]+") do
- t[#t+1] = s
- end
- end
- if trace_expansions then
- for k=1,#t do
- logs.report("fileio","% 4i: %s",k,t[k])
- end
- end
- return t
-end
-
-local function expanded_path_from_list(pathlist) -- maybe not a list, just a path
- -- a previous version fed back into pathlist
- local newlist, ok = { }, false
- for k=1,#pathlist do
- if find(pathlist[k],"[{}]") then
- ok = true
- break
- end
- end
- if ok then
- local function validate(s)
- s = file.collapse_path(s)
- return s ~= "" and not find(s,dummy_path_expr) and s
- end
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- else
- for k=1,#pathlist do
- for p in gmatch(pathlist[k],"([^,]+)") do
- p = file.collapse_path(p)
- if p ~= "" then newlist[#newlist+1] = p end
- end
- end
- end
- return newlist
-end
-
--- we follow a rather traditional approach:
---
--- (1) texmf.cnf given in TEXMFCNF
--- (2) texmf.cnf searched in default variable
---
--- also we now follow the stupid route: if not set then just assume *one*
--- cnf file under texmf (i.e. distribution)
-
-local args = environment and environment.original_arguments or arg -- this needs a cleanup
-
-resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
-resolvers.ownbin = gsub(resolvers.ownbin,"\\","/")
-
-function resolvers.getownpath()
- local ownpath = resolvers.ownpath or os.selfdir
- if not ownpath or ownpath == "" or ownpath == "unset" then
- ownpath = args[-1] or arg[-1]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- if not ownpath or ownpath == "" then
- ownpath = args[-0] or arg[-0]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- end
- local binary = resolvers.ownbin
- if not ownpath or ownpath == "" then
- ownpath = ownpath and file.dirname(binary)
- end
- if not ownpath or ownpath == "" then
- if os.binsuffix ~= "" then
- binary = file.replacesuffix(binary,os.binsuffix)
- end
- for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
- local b = file.join(p,binary)
- if lfs.isfile(b) then
- -- we assume that after changing to the path the currentdir function
- -- resolves to the real location and use this side effect here; this
- -- trick is needed because on the mac installations use symlinks in the
- -- path instead of real locations
- local olddir = lfs.currentdir()
- if lfs.chdir(p) then
- local pp = lfs.currentdir()
- if trace_locating and p ~= pp then
- logs.report("fileio","following symlink '%s' to '%s'",p,pp)
- end
- ownpath = pp
- lfs.chdir(olddir)
- else
- if trace_locating then
- logs.report("fileio","unable to check path '%s'",p)
- end
- ownpath = p
- end
- break
- end
- end
- end
- if not ownpath or ownpath == "" then
- ownpath = "."
- logs.report("fileio","forcing fallback ownpath .")
- elseif trace_locating then
- logs.report("fileio","using ownpath '%s'",ownpath)
- end
- end
- resolvers.ownpath = ownpath
- function resolvers.getownpath()
- return resolvers.ownpath
- end
- return ownpath
-end
-
-local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" }
-
-local function identify_own()
- local ownpath = resolvers.getownpath() or dir.current()
- local ie = instance.environment
- if ownpath then
- if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end
- if resolvers.env('SELFAUTODIR') == "" then os.env['SELFAUTODIR'] = file.collapse_path(ownpath .. "/..") end
- if resolvers.env('SELFAUTOPARENT') == "" then os.env['SELFAUTOPARENT'] = file.collapse_path(ownpath .. "/../..") end
- else
- logs.report("fileio","error: unable to locate ownpath")
- os.exit()
- end
- if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end
- if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end
- if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end
+function resolvers.report_critical_variables()
if trace_locating then
- for i=1,#own_places do
- local v = own_places[i]
- logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown")
+ for i=1,#resolvers.criticalvars do
+ local v = resolvers.criticalvars[i]
+ report_resolvers("variable '%s' set to '%s'",v,resolvers.getenv(v) or "unknown")
end
+ report_resolvers()
end
- identify_own = function() end
+ resolvers.report_critical_variables = function() end
end
-function resolvers.identify_cnf()
- if #instance.cnffiles == 0 then
- -- fallback
- identify_own()
- -- the real search
- resolvers.expand_variables()
- local t = resolvers.split_path(resolvers.env('TEXMFCNF'))
- t = expanded_path_from_list(t)
- expand_vars(t) -- redundant
- local function locate(filename,list)
- for i=1,#t do
- local ti = t[i]
- local texmfcnf = file.collapse_path(file.join(ti,filename))
- if lfs.isfile(texmfcnf) then
- list[#list+1] = texmfcnf
- end
- end
- end
- locate(resolvers.luaname,instance.luafiles)
- locate(resolvers.cnfname,instance.cnffiles)
- end
-end
-
-local function load_cnf_file(fname)
- fname = resolvers.clean_path(fname)
- local lname = file.replacesuffix(fname,'lua')
- if lfs.isfile(lname) then
- local dname = file.dirname(fname) -- fname ?
- if not instance.configuration[dname] then
- resolvers.load_data(dname,'configuration',lname and file.basename(lname))
- instance.order[#instance.order+1] = instance.configuration[dname]
+local function identify_configuration_files()
+ local specification = instance.specification
+ if #specification == 0 then
+ local cnfspec = resolvers.getenv('TEXMFCNF')
+ if cnfspec == "" then
+ cnfspec = resolvers.luacnfspec
+ resolvers.luacnfstate = "default"
+ else
+ resolvers.luacnfstate = "environment"
end
- else
- f = io.open(fname)
- if f then
- if trace_locating then
- logs.report("fileio","loading configuration file %s", fname)
- end
- local line, data, n, k, v
- local dname = file.dirname(fname)
- if not instance.configuration[dname] then
- instance.configuration[dname] = { }
- instance.order[#instance.order+1] = instance.configuration[dname]
- end
- local data = instance.configuration[dname]
- while true do
- local line, n = f:read(), 0
- if line then
- while true do -- join lines
- line, n = gsub(line,"\\%s*$", "")
- if n > 0 then
- line = line .. f:read()
- else
- break
+ resolvers.report_critical_variables()
+ resolvers.expand_variables()
+ local cnfpaths = expanded_path_from_list(resolvers.split_path(cnfspec))
+ expand_vars(cnfpaths) --- hm
+ local luacnfname = resolvers.luacnfname
+ for i=1,#cnfpaths do
+ local filename = collapse_path(filejoin(cnfpaths[i],luacnfname))
+ if lfs.isfile(filename) then
+ specification[#specification+1] = filename
+ end
+ end
+ end
+end
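-- A simplified standalone sketch of the lookup implemented above: take
-- TEXMFCNF from the environment when set, otherwise fall back to a default
-- specification, and keep every path that actually holds the lua cnf file.
-- Brace expansion is ignored here, and the ';' separator, the default spec
-- argument and the name 'texmfcnf.lua' are assumptions of this sketch.
local lfs = require("lfs")

local function find_configuration_files(default_spec, cnfname)
    local spec, state = os.getenv("TEXMFCNF"), "environment"
    if not spec or spec == "" then
        spec, state = default_spec, "default"
    end
    cnfname = cnfname or "texmfcnf.lua"
    local found = { }
    for path in spec:gmatch("[^;]+") do
        local filename = path .. "/" .. cnfname
        if lfs.isfile(filename) then
            found[#found+1] = filename
        end
    end
    return found, state
end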
+
+local function load_configuration_files()
+ local specification = instance.specification
+ if #specification > 0 then
+ local luacnfname = resolvers.luacnfname
+ for i=1,#specification do
+ local filename = specification[i]
+ local pathname = filedirname(filename)
+ local filename = filejoin(pathname,luacnfname)
+ local blob = loadfile(filename)
+ if blob then
+ local data = blob()
+ data = data and data.content
+ local setups = instance.setups
+ if data then
+ if trace_locating then
+ report_resolvers("loading configuration file '%s'",filename)
+ report_resolvers()
+ end
+                    -- flattening is easier to deal with, as we need to collapse the data later anyway
+ local t = { }
+ for k, v in next, data do -- v = progname
+ if v ~= unset_variable then
+ local kind = type(v)
+ if kind == "string" then
+ t[k] = v
+ elseif kind == "table" then
+ -- this operates on the table directly
+ setters.initialize(filename,k,v)
+ -- this doesn't (maybe metatables some day)
+ for kk, vv in next, v do -- vv = variable
+ if vv ~= unset_variable then
+ if type(vv) == "string" then
+ t[kk.."."..k] = vv
+ end
+ end
+ end
+ else
+ -- report_resolvers("strange key '%s' in configuration file '%s'",k,filename)
+ end
end
end
- if not find(line,"^[%%#]") then
- local l = gsub(line,"%s*%%.*$","")
- local k, v = match(l,"%s*(.-)%s*=%s*(.-)%s*$")
- if k and v and not data[k] then
- v = gsub(v,"[%%#].*",'')
- data[k] = gsub(v,"~","$HOME")
- instance.kpsevars[k] = true
+ setups[pathname] = t
+
+ if resolvers.luacnfstate == "default" then
+ -- the following code is not tested
+ local cnfspec = t["TEXMFCNF"]
+ if cnfspec then
+ -- we push the value into the main environment (osenv) so
+ -- that it takes precedence over the default one and therefore
+ -- also over following definitions
+ resolvers.setenv('TEXMFCNF',cnfspec)
+ -- we now identify and load the specified configuration files
+ instance.specification = { }
+ identify_configuration_files()
+ load_configuration_files()
+ -- we prevent further overload of the configuration variable
+ resolvers.luacnfstate = "configuration"
+ -- we quit the outer loop
+ break
end
end
+
else
- break
+ if trace_locating then
+ report_resolvers("skipping configuration file '%s'",filename)
+ end
+ setups[pathname] = { }
+ instance.loaderror = true
end
+ elseif trace_locating then
+ report_resolvers("skipping configuration file '%s'",filename)
+ end
+ instance.order[#instance.order+1] = instance.setups[pathname]
+ if instance.loaderror then
+ break
end
- f:close()
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'", fname)
end
+ elseif trace_locating then
+ report_resolvers("warning: no lua configuration files found")
end
end
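-- A minimal sketch of the flattening done above: a configuration table may
-- nest variables per program name, and those are collapsed into flat
-- 'variable.progname' keys next to the plain string entries.  This leaves out
-- the setters.initialize call; the sample data in the comment is made up.
local function flatten_configuration(data, unset)
    local t = { }
    for k, v in pairs(data) do
        if v ~= unset then
            if type(v) == "string" then
                t[k] = v
            elseif type(v) == "table" then
                for kk, vv in pairs(v) do
                    if vv ~= unset and type(vv) == "string" then
                        t[kk .. "." .. k] = vv -- e.g. TEXINPUTS.context
                    end
                end
            end
        end
    end
    return t
end

-- flatten_configuration { TEXMFHOME = "~/texmf", context = { TEXINPUTS = "." } }
-- returns { TEXMFHOME = "~/texmf", ["TEXINPUTS.context"] = "." }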
-local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared)
- local order = instance.order
+local function collapse_configuration_data() -- potential optimization: pass start index (setup and configuration are shared)
+ local order, variables, environment, origins = instance.order, instance.variables, instance.environment, instance.origins
for i=1,#order do
local c = order[i]
for k,v in next, c do
- if not instance.variables[k] then
- if instance.environment[k] then
- instance.variables[k] = instance.environment[k]
+ if variables[k] then
+ -- okay
+ else
+ local ek = environment[k]
+ if ek and ek ~= "" then
+ variables[k], origins[k] = ek, "env"
else
- instance.kpsevars[k] = true
- instance.variables[k] = resolvers.bare_variable(v)
+ local bv = checked_variable(v)
+ variables[k], origins[k] = bv, "cnf"
end
end
end
end
end
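-- A condensed sketch of the precedence implemented above: a value coming from
-- the environment wins over the one found in a configuration file, and the
-- origin of each variable is recorded.  Standalone and simplified; the real
-- code works on the resolver instance and additionally runs cnf values
-- through checked_variable.
local function merge_configuration(order, environment)
    local variables, origins = { }, { }
    for i = 1, #order do
        for k, v in pairs(order[i]) do
            if variables[k] == nil then
                local ek = environment[k]
                if ek and ek ~= "" then
                    variables[k], origins[k] = ek, "env"
                else
                    variables[k], origins[k] = v, "cnf"
                end
            end
        end
    end
    return variables, origins
end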
-function resolvers.load_cnf()
- local function loadoldconfigdata()
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- load_cnf_file(cnffiles[i])
- end
- end
-    -- instance.cnffiles now contains complete names!
-    -- we still use a funny mix of cnf and lua files, but soon
-    -- we will switch to lua exclusively, as we only use
-    -- the cnf file to collect the tree roots
- if #instance.cnffiles == 0 then
- if trace_locating then
- logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)")
- end
- else
- local cnffiles = instance.cnffiles
- instance.rootpath = cnffiles[1]
- for k=1,#cnffiles do
- instance.cnffiles[k] = file.collapse_path(cnffiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- if instance.diskcache and not instance.renewcache then
- resolvers.loadoldconfig(instance.cnffiles)
- if instance.loaderror then
- loadoldconfigdata()
- resolvers.saveoldconfig()
- end
- else
- loadoldconfigdata()
- if instance.renewcache then
- resolvers.saveoldconfig()
- end
- end
- collapse_cnf_data()
- end
- check_configuration()
-end
-
-function resolvers.load_lua()
- if #instance.luafiles == 0 then
- -- yet harmless
- else
- instance.rootpath = instance.luafiles[1]
- local luafiles = instance.luafiles
- for k=1,#luafiles do
- instance.luafiles[k] = file.collapse_path(luafiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- resolvers.loadnewconfig()
- collapse_cnf_data()
- end
- check_configuration()
-end
-
-- database loading
-function resolvers.load_hash()
- resolvers.locatelists()
- if instance.diskcache and not instance.renewcache then
- resolvers.loadfiles()
- if instance.loaderror then
- resolvers.loadlists()
- resolvers.savefiles()
- end
- else
- resolvers.loadlists()
- if instance.renewcache then
- resolvers.savefiles()
- end
- end
-end
-
-function resolvers.append_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' appended",tag)
- end
- insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } )
-end
-
-function resolvers.prepend_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' prepended",tag)
- end
- insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } )
-end
-
-function resolvers.extend_texmf_var(specification) -- crap, it would be better to prepend the hash
--- local t = resolvers.expanded_path_list('TEXMF') -- full expansion
- local t = resolvers.split_path(resolvers.env('TEXMF'))
- insert(t,1,specification)
- local newspec = concat(t,";")
- if instance.environment["TEXMF"] then
- instance.environment["TEXMF"] = newspec
- elseif instance.variables["TEXMF"] then
- instance.variables["TEXMF"] = newspec
- else
- -- weird
- end
- resolvers.expand_variables()
- reset_hashes()
-end
-
-- locators
-function resolvers.locatelists()
- local texmfpaths = resolvers.clean_path_list('TEXMF')
- for i=1,#texmfpaths do
- local path = texmfpaths[i]
- if trace_locating then
- logs.report("fileio","locating list of '%s'",path)
- end
- resolvers.locatedatabase(file.collapse_path(path))
- end
-end
-
function resolvers.locatedatabase(specification)
return resolvers.methodhandler('locators', specification)
end
@@ -8882,11 +9832,11 @@ end
function resolvers.locators.tex(specification)
if specification and specification ~= '' and lfs.isdir(specification) then
if trace_locating then
- logs.report("fileio","tex locator '%s' found",specification)
+ report_resolvers("tex locator '%s' found",specification)
end
- resolvers.append_hash('file',specification,filename)
+ resolvers.append_hash('file',specification,filename,true) -- cache
elseif trace_locating then
- logs.report("fileio","tex locator '%s' not found",specification)
+ report_resolvers("tex locator '%s' not found",specification)
end
end
@@ -8896,9 +9846,8 @@ function resolvers.hashdatabase(tag,name)
return resolvers.methodhandler('hashers',tag,name)
end
-function resolvers.loadfiles()
- instance.loaderror = false
- instance.files = { }
+local function load_file_databases()
+ instance.loaderror, instance.files = false, { }
if not instance.renewcache then
local hashes = instance.hashes
for k=1,#hashes do
@@ -8909,194 +9858,134 @@ function resolvers.loadfiles()
end
end
-function resolvers.hashers.tex(tag,name)
- resolvers.load_data(tag,'files')
-end
-
--- generators:
-
-function resolvers.loadlists()
- local hashes = instance.hashes
- for i=1,#hashes do
- resolvers.generatedatabase(hashes[i].tag)
+function resolvers.hashers.tex(tag,name) -- used where?
+ local content = caches.loadcontent(tag,'files')
+ if content then
+ instance.files[tag] = content
+ else
+ instance.files[tag] = { }
+ instance.loaderror = true
end
end
-function resolvers.generatedatabase(specification)
- return resolvers.methodhandler('generators', specification)
-end
-
--- starting with . or .. etc or funny char
-
-local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-
---~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
---~ local l_confusing = lpeg.P(" ")
---~ local l_character = lpeg.patterns.utf8
---~ local l_dangerous = lpeg.P(".")
-
---~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1)
---~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false)
-
---~ local function test(str)
---~ print(str,lpeg.match(l_normal,str))
---~ end
---~ test("ヒラギノ明朝 Pro W3")
---~ test("..ヒラギノ明朝 Pro W3")
---~ test(":ヒラギノ明朝 Pro W3;")
---~ test("ヒラギノ明朝 /Pro W3;")
---~ test("ヒラギノ明朝 Pro W3")
-
-function resolvers.generators.tex(specification)
- local tag = specification
- if trace_locating then
- logs.report("fileio","scanning path '%s'",specification)
- end
- instance.files[tag] = { }
- local files = instance.files[tag]
- local n, m, r = 0, 0, 0
- local spec = specification .. '/'
- local attributes = lfs.attributes
- local directory = lfs.dir
- local function action(path)
- local full
- if path then
- full = spec .. path .. '/'
- else
- full = spec
- end
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- -- if lpegmatch(l_normal,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- if path then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
+local function locate_file_databases()
+ -- todo: cache:// and tree:// (runtime)
+ local texmfpaths = resolvers.expanded_path_list('TEXMF')
+ for i=1,#texmfpaths do
+ local path = collapse_path(texmfpaths[i])
+ local stripped = gsub(path,"^!!","")
+ local runtime = stripped == path
+ path = resolvers.clean_path(path)
+ if stripped ~= "" then
+ if lfs.isdir(path) then
+ local spec = resolvers.splitmethod(stripped)
+ if spec.scheme == "cache" then
+ stripped = spec.path
+ elseif runtime and (spec.noscheme or spec.scheme == "file") then
+ stripped = "tree:///" .. stripped
+ end
+ if trace_locating then
+ if runtime then
+ report_resolvers("locating list of '%s' (runtime)",path)
+ else
+ report_resolvers("locating list of '%s' (cached)",path)
end
- elseif mode == 'directory' then
- m = m + 1
- if path then
- action(path..'/'..name)
+ end
+ resolvers.locatedatabase(stripped) -- nothing done with result
+ else
+ if trace_locating then
+ if runtime then
+ report_resolvers("skipping list of '%s' (runtime)",path)
else
- action(name)
+ report_resolvers("skipping list of '%s' (cached)",path)
end
end
end
end
end
- action()
if trace_locating then
- logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r)
+ report_resolvers()
end
end
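-- The scanner removed here (and the generator that replaces it) stores one
-- entry per basename and adds a lowercased "remap:" alias, so that later
-- lookups (see resolvers.with_files further down) can be case tolerant.  A
-- standalone sketch of that layout; 'remember_file' is an illustrative name:
local function remember_file(files, name, path)
    local known = files[name]
    if known then
        if type(known) == "string" then
            files[name] = { known, path } -- second hit: promote to a list of paths
        else
            known[#known+1] = path
        end
    else
        files[name] = path
        local lowered = string.lower(name)
        if lowered ~= name then
            files["remap:" .. lowered] = name -- maps the lowercased name back to the real one
        end
    end
end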
--- savers, todo
-
-function resolvers.savefiles()
- resolvers.save_data('files')
+local function generate_file_databases()
+ local hashes = instance.hashes
+ for i=1,#hashes do
+ resolvers.methodhandler('generators',hashes[i].tag)
+ end
+ if trace_locating then
+ report_resolvers()
+ end
end
--- A config (optionally) has the paths split in tables. Internally
--- we join them and split them after the expansion has taken place. This
--- is more convenient.
-
---~ local checkedsplit = string.checkedsplit
-
-local cache = { }
-
-local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;")))
-
-local function split_kpse_path(str) -- beware, this can be either a path or a {specification}
- local found = cache[str]
- if not found then
- if str == "" then
- found = { }
- else
- str = gsub(str,"\\","/")
---~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator)
-local split = lpegmatch(splitter,str)
- found = { }
- for i=1,#split do
- local s = split[i]
- if not find(s,"^{*unset}*") then
- found[#found+1] = s
- end
- end
- if trace_expansions then
- logs.report("fileio","splitting path specification '%s'",str)
- for k=1,#found do
- logs.report("fileio","% 4i: %s",k,found[k])
- end
- end
- cache[str] = found
+local function save_file_databases() -- will become cachers
+ for i=1,#instance.hashes do
+ local hash = instance.hashes[i]
+ local cachename = hash.tag
+ if hash.cache then
+ local content = instance.files[cachename]
+ caches.collapsecontent(content)
+ caches.savecontent(cachename,"files",content)
+ elseif trace_locating then
+ report_resolvers("not saving runtime tree '%s'",cachename)
end
end
- return found
end
-resolvers.split_kpse_path = split_kpse_path
-
-function resolvers.splitconfig()
- for i=1,#instance do
- local c = instance[i]
- for k,v in next, c do
- if type(v) == 'string' then
- local t = split_kpse_path(v)
- if #t > 1 then
- c[k] = t
- end
- end
+local function load_databases()
+ locate_file_databases()
+ if instance.diskcache and not instance.renewcache then
+ load_file_databases()
+ if instance.loaderror then
+ generate_file_databases()
+ save_file_databases()
+ end
+ else
+ generate_file_databases()
+ if instance.renewcache then
+ save_file_databases()
end
end
end
-function resolvers.joinconfig()
- local order = instance.order
- for i=1,#order do
- local c = order[i]
- for k,v in next, c do -- indexed?
- if type(v) == 'table' then
- c[k] = file.join_path(v)
- end
- end
+function resolvers.append_hash(type,tag,name,cache)
+ if trace_locating then
+ report_resolvers("hash '%s' appended",tag)
end
+ insert(instance.hashes, { type = type, tag = tag, name = name, cache = cache } )
end
-function resolvers.split_path(str)
- if type(str) == 'table' then
- return str
- else
- return split_kpse_path(str)
+function resolvers.prepend_hash(type,tag,name,cache)
+ if trace_locating then
+ report_resolvers("hash '%s' prepended",tag)
end
+ insert(instance.hashes, 1, { type = type, tag = tag, name = name, cache = cache } )
end
-function resolvers.join_path(str)
- if type(str) == 'table' then
- return file.join_path(str)
+function resolvers.extend_texmf_var(specification) -- crap, it would be better to prepend the hash
+-- local t = resolvers.expanded_path_list('TEXMF') -- full expansion
+ local t = resolvers.split_path(resolvers.getenv('TEXMF'))
+ insert(t,1,specification)
+ local newspec = concat(t,";")
+ if instance.environment["TEXMF"] then
+ instance.environment["TEXMF"] = newspec
+ elseif instance.variables["TEXMF"] then
+ instance.variables["TEXMF"] = newspec
else
- return str
+ -- weird
end
+ resolvers.expand_variables()
+ reset_hashes()
+end
+
+function resolvers.generators.tex(specification,tag)
+ instance.files[tag or specification] = resolvers.scan_files(specification)
end
function resolvers.splitexpansions()
local ie = instance.expansions
for k,v in next, ie do
- local t, h, p = { }, { }, split_kpse_path(v)
+ local t, h, p = { }, { }, split_configuration_path(v)
for kk=1,#p do
local vv = p[kk]
if vv ~= "" and not h[vv] then
@@ -9114,222 +10003,22 @@ end
-- end of split/join code
-function resolvers.saveoldconfig()
- resolvers.splitconfig()
- resolvers.save_data('configuration')
- resolvers.joinconfig()
-end
-
-resolvers.configbanner = [[
--- This is a Luatex configuration file created by 'luatools.lua' or
--- 'luatex.exe' directly. For comments, suggestions and questions you can
--- contact the ConTeXt Development Team. This configuration file is
--- not copyrighted. [HH & TH]
-]]
-
-function resolvers.serialize(files)
- -- This version is somewhat optimized for the kind of
- -- tables that we deal with, so it's much faster than
- -- the generic serializer. This makes sense because
- -- luatools and mtxtools are called frequently. Okay,
- -- we pay a small price for properly tabbed tables.
- local t = { }
- local function dump(k,v,m) -- could be moved inline
- if type(v) == 'string' then
- return m .. "['" .. k .. "']='" .. v .. "',"
- elseif #v == 1 then
- return m .. "['" .. k .. "']='" .. v[1] .. "',"
- else
- return m .. "['" .. k .. "']={'" .. concat(v,"','").. "'},"
- end
- end
- t[#t+1] = "return {"
- if instance.sortdata then
- local sortedfiles = sortedkeys(files)
- for i=1,#sortedfiles do
- local k = sortedfiles[i]
- local fk = files[k]
- if type(fk) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- local sortedfk = sortedkeys(fk)
- for j=1,#sortedfk do
- local kk = sortedfk[j]
- t[#t+1] = dump(kk,fk[kk],"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,fk,"\t")
- end
- end
- else
- for k, v in next, files do
- if type(v) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- for kk,vv in next, v do
- t[#t+1] = dump(kk,vv,"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,v,"\t")
- end
- end
- end
- t[#t+1] = "}"
- return concat(t,"\n")
-end
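-- For reference, the serializer above emits a plain 'return { ... }' table in
-- which a basename maps either to a single path string or, when it occurs in
-- several directories, to a list of paths; the surrounding keys come from the
-- wrapper built in save_data.  The names below are made up:
--
-- return {
--     ['type']='files',
--     ['root']='/opt/tex/texmf',
--     ['content']={
--         ['context.mkiv']='tex/context/base',
--         ['s-abr-01.tex']={'tex/context/modules','tex/context/extra'},
--     },
-- }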
-
-local data_state = { }
+-- we used to have 'files' and 'configurations' so therefore the following
+-- shared function
function resolvers.data_state()
- return data_state or { }
-end
-
-function resolvers.save_data(dataname, makename) -- untested without cache overload
- for cachename, files in next, instance[dataname] do
- local name = (makename or file.join)(cachename,dataname)
- local luaname, lucname = name .. ".lua", name .. ".luc"
- if trace_locating then
- logs.report("fileio","preparing '%s' for '%s'",dataname,cachename)
- end
- for k, v in next, files do
- if type(v) == "table" and #v == 1 then
- files[k] = v[1]
- end
- end
- local data = {
- type = dataname,
- root = cachename,
- version = resolvers.cacheversion,
- date = os.date("%Y-%m-%d"),
- time = os.date("%H:%M:%S"),
- content = files,
- uuid = os.uuid(),
- }
- local ok = io.savedata(luaname,resolvers.serialize(data))
- if ok then
- if trace_locating then
- logs.report("fileio","'%s' saved in '%s'",dataname,luaname)
- end
- if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip
- if trace_locating then
- logs.report("fileio","'%s' compiled to '%s'",dataname,lucname)
- end
- else
- if trace_locating then
- logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname)
- end
- os.remove(lucname)
- end
- elseif trace_locating then
- logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname)
- end
- end
-end
-
-function resolvers.load_data(pathname,dataname,filename,makename) -- untested without cache overload
- filename = ((not filename or (filename == "")) and dataname) or filename
- filename = (makename and makename(dataname,filename)) or file.join(pathname,filename)
- local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
- if blob then
- local data = blob()
- if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then
- data_state[#data_state+1] = data.uuid
- if trace_locating then
- logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = data.content
- else
- if trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
-end
-
--- some day I'll use the nested approach, but not yet (actually we even drop
--- engine/progname support since we have only luatex now)
---
--- first texmfcnf.lua files are located, next the cached texmf.cnf files
---
--- return {
---     TEXMFBOGUS = 'just checking whether this works',
--- }
-
-function resolvers.resetconfig()
- identify_own()
- instance.configuration, instance.setup, instance.order, instance.loaderror = { }, { }, { }, false
-end
-
-function resolvers.loadnewconfig()
- local luafiles = instance.luafiles
- for i=1,#luafiles do
- local cnf = luafiles[i]
- local pathname = file.dirname(cnf)
- local filename = file.join(pathname,resolvers.luaname)
- local blob = loadfile(filename)
- if blob then
- local data = blob()
- if data then
- if trace_locating then
- logs.report("fileio","loading configuration file '%s'",filename)
- end
- if true then
- -- flatten to variable.progname
- local t = { }
- for k, v in next, data do -- v = progname
- if type(v) == "string" then
- t[k] = v
- else
- for kk, vv in next, v do -- vv = variable
- if type(vv) == "string" then
- t[vv.."."..v] = kk
- end
- end
- end
- end
- instance['setup'][pathname] = t
- else
- instance['setup'][pathname] = data
- end
- else
- if trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance['setup'][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance.order[#instance.order+1] = instance.setup[pathname]
- if instance.loaderror then break end
- end
-end
-
-function resolvers.loadoldconfig()
- if not instance.renewcache then
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- local cnf = cnffiles[i]
- local dname = file.dirname(cnf)
- resolvers.load_data(dname,'configuration')
- instance.order[#instance.order+1] = instance.configuration[dname]
- if instance.loaderror then break end
- end
- end
- resolvers.joinconfig()
+ return caches.contentstate()
end
function resolvers.expand_variables()
local expansions, environment, variables = { }, instance.environment, instance.variables
- local env = resolvers.env
+ local getenv = resolvers.getenv
instance.expansions = expansions
- if instance.engine ~= "" then environment['engine'] = instance.engine end
- if instance.progname ~= "" then environment['progname'] = instance.progname end
+ local engine, progname = instance.engine, instance.progname
+ if type(engine) ~= "string" then instance.engine, engine = "", "" end
+ if type(progname) ~= "string" then instance.progname, progname = "", "" end
+ if engine ~= "" then environment['engine'] = engine end
+ if progname ~= "" then environment['progname'] = progname end
for k,v in next, environment do
local a, b = match(k,"^(%a+)%_(.*)%s*$")
if a and b then
@@ -9338,7 +10027,7 @@ function resolvers.expand_variables()
expansions[k] = v
end
end
- for k,v in next, environment do -- move environment to expansions
+ for k,v in next, environment do -- move environment to expansions (variables are already in there)
if not expansions[k] then expansions[k] = v end
end
for k,v in next, variables do -- move variables to expansions
@@ -9347,7 +10036,7 @@ function resolvers.expand_variables()
local busy = false
local function resolve(a)
busy = true
- return expansions[a] or env(a)
+ return expansions[a] or getenv(a)
end
while true do
busy = false
@@ -9355,6 +10044,8 @@ function resolvers.expand_variables()
local s, n = gsub(v,"%$([%a%d%_%-]+)",resolve)
local s, m = gsub(s,"%$%{([%a%d%_%-]+)%}",resolve)
if n > 0 or m > 0 then
+ s = gsub(s,";+",";")
+ s = gsub(s,";[!{}/\\]+;",";")
expansions[k]= s
end
end
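-- A bounded standalone sketch of the expansion loop above: $NAME and ${NAME}
-- references are substituted repeatedly until nothing changes, falling back
-- to the process environment for unknown names.  The pass limit and the
-- empty-string fallback are safety assumptions of this sketch, not part of
-- the resolver code.
local function expand_all(expansions)
    local function resolve(name)
        return expansions[name] or os.getenv(name) or ""
    end
    for pass = 1, 10 do -- bounded so that circular references cannot loop forever
        local busy = false
        for k, v in pairs(expansions) do
            local s, n = v:gsub("%$([%a%d%_%-]+)", resolve)
            local r, m = s:gsub("%$%{([%a%d%_%-]+)%}", resolve)
            if n > 0 or m > 0 then
                expansions[k] = r:gsub(";+", ";") -- collapse empty components, like the new code above
                busy = true
            end
        end
        if not busy then break end
    end
    return expansions
end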
@@ -9391,63 +10082,59 @@ function resolvers.unexpanded_path(str)
return file.join_path(resolvers.unexpanded_path_list(str))
end
-do -- no longer needed
-
- local done = { }
+local done = { }
- function resolvers.reset_extra_path()
- local ep = instance.extra_paths
- if not ep then
- ep, done = { }, { }
- instance.extra_paths = ep
- elseif #ep > 0 then
- instance.lists, done = { }, { }
- end
+function resolvers.reset_extra_path()
+ local ep = instance.extra_paths
+ if not ep then
+ ep, done = { }, { }
+ instance.extra_paths = ep
+ elseif #ep > 0 then
+ instance.lists, done = { }, { }
end
+end
- function resolvers.register_extra_path(paths,subpaths)
- local ep = instance.extra_paths or { }
- local n = #ep
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = p .. "/" .. s
- if not done[ps] then
- ep[#ep+1] = resolvers.clean_path(ps)
- done[ps] = true
- end
- end
- end
- else
- for p in gmatch(paths,"[^,]+") do
- if not done[p] then
- ep[#ep+1] = resolvers.clean_path(p)
- done[p] = true
- end
- end
- end
- elseif subpaths and subpaths ~= "" then
- for i=1,n do
+function resolvers.register_extra_path(paths,subpaths)
+ local ep = instance.extra_paths or { }
+ local n = #ep
+ if paths and paths ~= "" then
+ if subpaths and subpaths ~= "" then
+ for p in gmatch(paths,"[^,]+") do
-- we gmatch each step again, not that fast, but used seldom
for s in gmatch(subpaths,"[^,]+") do
- local ps = ep[i] .. "/" .. s
+ local ps = p .. "/" .. s
if not done[ps] then
ep[#ep+1] = resolvers.clean_path(ps)
done[ps] = true
end
end
end
+ else
+ for p in gmatch(paths,"[^,]+") do
+ if not done[p] then
+ ep[#ep+1] = resolvers.clean_path(p)
+ done[p] = true
+ end
+ end
end
- if #ep > 0 then
- instance.extra_paths = ep -- register paths
- end
- if #ep > n then
- instance.lists = { } -- erase the cache
+ elseif subpaths and subpaths ~= "" then
+ for i=1,n do
+ -- we gmatch each step again, not that fast, but used seldom
+ for s in gmatch(subpaths,"[^,]+") do
+ local ps = ep[i] .. "/" .. s
+ if not done[ps] then
+ ep[#ep+1] = resolvers.clean_path(ps)
+ done[ps] = true
+ end
+ end
end
end
-
+ if #ep > 0 then
+ instance.extra_paths = ep -- register paths
+ end
+ if #ep > n then
+ instance.lists = { } -- erase the cache
+ end
end
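-- Usage note for the function above: both arguments are comma separated and
-- every subpath is combined with every path, so, for example (made-up values),
--
--   resolvers.register_extra_path("fonts,styles","project,private")
--
-- registers fonts/project, fonts/private, styles/project and styles/private
-- (each run through clean_path and deduplicated); whenever new entries are
-- added, instance.lists is cleared so that cached lookups are redone.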
local function made_list(instance,list)
@@ -9492,7 +10179,7 @@ function resolvers.clean_path_list(str)
local t = resolvers.expanded_path_list(str)
if t then
for i=1,#t do
- t[i] = file.collapse_path(resolvers.clean_path(t[i]))
+ t[i] = collapse_path(resolvers.clean_path(t[i]))
end
end
return t
@@ -9532,33 +10219,6 @@ function resolvers.expand_path_from_var(str)
return file.join_path(resolvers.expanded_path_list_from_var(str))
end
-function resolvers.format_of_var(str)
- return formats[str] or formats[alternatives[str]] or ''
-end
-function resolvers.format_of_suffix(str)
- return suffixmap[file.extname(str)] or 'tex'
-end
-
-function resolvers.variable_of_format(str)
- return formats[str] or formats[alternatives[str]] or ''
-end
-
-function resolvers.var_of_format_or_suffix(str)
- local v = formats[str]
- if v then
- return v
- end
- v = formats[alternatives[str]]
- if v then
- return v
- end
- v = suffixmap[file.extname(str)]
- if v then
- return formats[isf]
- end
- return ''
-end
-
function resolvers.expand_braces(str) -- output variable and brace expansion of STRING
local ori = resolvers.variable(str)
local pth = expanded_path_from_list(resolvers.split_path(ori))
@@ -9571,9 +10231,9 @@ function resolvers.isreadable.file(name)
local readable = lfs.isfile(name) -- brrr
if trace_detail then
if readable then
- logs.report("fileio","file '%s' is readable",name)
+ report_resolvers("file '%s' is readable",name)
else
- logs.report("fileio","file '%s' is not readable", name)
+ report_resolvers("file '%s' is not readable", name)
end
end
return readable
@@ -9589,10 +10249,10 @@ local function collect_files(names)
for k=1,#names do
local fname = names[k]
if trace_detail then
- logs.report("fileio","checking name '%s'",fname)
+ report_resolvers("checking name '%s'",fname)
end
- local bname = file.basename(fname)
- local dname = file.dirname(fname)
+ local bname = filebasename(fname)
+ local dname = filedirname(fname)
if dname == "" or find(dname,"^%.") then
dname = false
else
@@ -9605,7 +10265,7 @@ local function collect_files(names)
local files = blobpath and instance.files[blobpath]
if files then
if trace_detail then
- logs.report("fileio","deep checking '%s' (%s)",blobpath,bname)
+ report_resolvers("deep checking '%s' (%s)",blobpath,bname)
end
local blobfile = files[bname]
if not blobfile then
@@ -9617,53 +10277,38 @@ local function collect_files(names)
end
end
if blobfile then
+ local blobroot = files.__path__ or blobpath
if type(blobfile) == 'string' then
if not dname or find(blobfile,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,blobfile,bname), -- search
- resolvers.concatinators[hash.type](blobpath,blobfile,bname) -- result
- }
+ local kind = hash.type
+ local search = filejoin(blobpath,blobfile,bname)
+ local result = resolvers.concatinators[hash.type](blobroot,blobfile,bname)
+ if trace_detail then
+ report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
+ end
+ filelist[#filelist+1] = { kind, search, result }
end
else
for kk=1,#blobfile do
local vv = blobfile[kk]
if not dname or find(vv,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,vv,bname), -- search
- resolvers.concatinators[hash.type](blobpath,vv,bname) -- result
- }
+ local kind = hash.type
+ local search = filejoin(blobpath,vv,bname)
+ local result = resolvers.concatinators[hash.type](blobroot,vv,bname)
+ if trace_detail then
+ report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
+ end
+ filelist[#filelist+1] = { kind, search, result }
end
end
end
end
elseif trace_locating then
- logs.report("fileio","no match in '%s' (%s)",blobpath,bname)
+ report_resolvers("no match in '%s' (%s)",blobpath,bname)
end
end
end
- if #filelist > 0 then
- return filelist
- else
- return nil
- end
-end
-
-function resolvers.suffix_of_format(str)
- if suffixes[str] then
- return suffixes[str][1]
- else
- return ""
- end
-end
-
-function resolvers.suffixes_of_format(str)
- if suffixes[str] then
- return suffixes[str]
- else
- return {}
- end
+ return #filelist > 0 and filelist or nil
end
function resolvers.register_in_trees(name)
@@ -9683,27 +10328,28 @@ local function can_be_dir(name) -- can become local
fakepaths[name] = 2 -- no directory
end
end
- return (fakepaths[name] == 1)
+ return fakepaths[name] == 1
end
local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc)
local result = collected or { }
local stamp = nil
- filename = file.collapse_path(filename)
+ filename = collapse_path(filename)
-- speed up / beware: format problem
if instance.remember then
stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format
if instance.found[stamp] then
if trace_locating then
- logs.report("fileio","remembering file '%s'",filename)
+ report_resolvers("remembering file '%s'",filename)
end
+ resolvers.register_in_trees(filename) -- for tracing used files
return instance.found[stamp]
end
end
if not dangerous[instance.format or "?"] then
if resolvers.isreadable.file(filename) then
if trace_detail then
- logs.report("fileio","file '%s' found directly",filename)
+ report_resolvers("file '%s' found directly",filename)
end
instance.found[stamp] = { filename }
return { filename }
@@ -9711,36 +10357,39 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
end
if find(filename,'%*') then
if trace_locating then
- logs.report("fileio","checking wildcard '%s'", filename)
+ report_resolvers("checking wildcard '%s'", filename)
end
result = resolvers.find_wildcard_files(filename)
elseif file.is_qualified_path(filename) then
if resolvers.isreadable.file(filename) then
if trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
+ report_resolvers("qualified name '%s'", filename)
end
result = { filename }
else
- local forcedname, ok, suffix = "", false, file.extname(filename)
+ local forcedname, ok, suffix = "", false, fileextname(filename)
if suffix == "" then -- why
if instance.format == "" then
forcedname = filename .. ".tex"
if resolvers.isreadable.file(forcedname) then
if trace_locating then
- logs.report("fileio","no suffix, forcing standard filetype 'tex'")
+ report_resolvers("no suffix, forcing standard filetype 'tex'")
end
result, ok = { forcedname }, true
end
else
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
- forcedname = filename .. "." .. s
- if resolvers.isreadable.file(forcedname) then
- if trace_locating then
- logs.report("fileio","no suffix, forcing format filetype '%s'", s)
+ local format_suffixes = suffixes[instance.format]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if resolvers.isreadable.file(forcedname) then
+ if trace_locating then
+ report_resolvers("no suffix, forcing format filetype '%s'", s)
+ end
+ result, ok = { forcedname }, true
+ break
end
- result, ok = { forcedname }, true
- break
end
end
end
@@ -9748,7 +10397,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if not ok and suffix ~= "" then
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
- local basename = file.basename(filename)
+ local basename = filebasename(filename)
local pattern = gsub(filename .. "$","([%.%-])","%%%1")
local savedformat = instance.format
local format = savedformat or ""
@@ -9789,12 +10438,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
-- end
end
if not ok and trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
+ report_resolvers("qualified name '%s'", filename)
end
end
else
-- search spec
- local filetype, extra, done, wantedfiles, ext = '', nil, false, { }, file.extname(filename)
+ local filetype, extra, done, wantedfiles, ext = '', nil, false, { }, fileextname(filename)
+ -- tricky as filename can be bla.1.2.3
if ext == "" then
if not instance.force_suffixes then
wantedfiles[#wantedfiles+1] = filename
@@ -9803,29 +10453,31 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
wantedfiles[#wantedfiles+1] = filename
end
if instance.format == "" then
- if ext == "" then
+ if ext == "" or not suffixmap[ext] then
local forcedname = filename .. '.tex'
wantedfiles[#wantedfiles+1] = forcedname
filetype = resolvers.format_of_suffix(forcedname)
if trace_locating then
- logs.report("fileio","forcing filetype '%s'",filetype)
+ report_resolvers("forcing filetype '%s'",filetype)
end
else
filetype = resolvers.format_of_suffix(filename)
if trace_locating then
- logs.report("fileio","using suffix based filetype '%s'",filetype)
+ report_resolvers("using suffix based filetype '%s'",filetype)
end
end
else
- if ext == "" then
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. s
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[instance.format]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
end
filetype = instance.format
if trace_locating then
- logs.report("fileio","using given filetype '%s'",filetype)
+ report_resolvers("using given filetype '%s'",filetype)
end
end
local typespec = resolvers.variable_of_format(filetype)
@@ -9833,13 +10485,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if not pathlist or #pathlist == 0 then
-- no pathlist, access check only / todo == wildcard
if trace_detail then
- logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ report_resolvers("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
end
for k=1,#wantedfiles do
local fname = wantedfiles[k]
if fname and resolvers.isreadable.file(fname) then
filename, done = fname, true
- result[#result+1] = file.join('.',fname)
+ result[#result+1] = filejoin('.',fname)
break
end
end
@@ -9857,11 +10509,11 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
local dirlist = { }
if filelist then
for i=1,#filelist do
- dirlist[i] = file.dirname(filelist[i][2]) .. "/"
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
end
if trace_detail then
- logs.report("fileio","checking filename '%s'",filename)
+ report_resolvers("checking filename '%s'",filename)
end
-- a bit messy ... esp the doscan setting here
local doscan
@@ -9884,7 +10536,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
expression = "^" .. expression .. "$"
if trace_detail then
- logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname)
+ report_resolvers("using pattern '%s' for path '%s'",expression,pathname)
end
for k=1,#filelist do
local fl = filelist[k]
@@ -9893,20 +10545,19 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if find(d,expression) then
--- todo, test for readable
result[#result+1] = fl[3]
- resolvers.register_in_trees(f) -- for tracing used files
done = true
if instance.allresults then
if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d)
+ report_resolvers("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
else
if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d)
+ report_resolvers("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
end
break
end
elseif trace_detail then
- logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d)
+ report_resolvers("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
end
@@ -9919,10 +10570,10 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if can_be_dir(ppname) then
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local fname = file.join(ppname,w)
+ local fname = filejoin(ppname,w)
if resolvers.isreadable.file(fname) then
if trace_detail then
- logs.report("fileio","found '%s' by scanning",fname)
+ report_resolvers("found '%s' by scanning",fname)
end
result[#result+1] = fname
done = true
@@ -9936,14 +10587,16 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
end
end
if not done and doscan then
- -- todo: slow path scanning
+ -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
end
if done and not instance.allresults then break end
end
end
end
for k=1,#result do
- result[k] = file.collapse_path(result[k])
+ local rk = collapse_path(result[k])
+ result[k] = rk
+ resolvers.register_in_trees(rk) -- for tracing used files
end
if instance.remember then
instance.found[stamp] = result
@@ -9953,7 +10606,7 @@ end
if not resolvers.concatinators then resolvers.concatinators = { } end
-resolvers.concatinators.tex = file.join
+resolvers.concatinators.tex = filejoin
resolvers.concatinators.file = resolvers.concatinators.tex
function resolvers.find_files(filename,filetype,mustexist)
@@ -9980,8 +10633,14 @@ function resolvers.find_file(filename,filetype,mustexist)
return (resolvers.find_files(filename,filetype,mustexist)[1] or "")
end
+function resolvers.find_path(filename,filetype)
+ local path = resolvers.find_files(filename,filetype)[1] or ""
+ -- todo return current path
+ return file.dirname(path)
+end
+
function resolvers.find_given_files(filename)
- local bname, result = file.basename(filename), { }
+ local bname, result = filebasename(filename), { }
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
@@ -10038,9 +10697,9 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
return done
end
-function resolvers.find_wildcard_files(filename) -- todo: remap:
+function resolvers.find_wildcard_files(filename) -- todo: remap: and lpeg
local result = { }
- local bname, dname = file.basename(filename), file.dirname(filename)
+ local bname, dname = filebasename(filename), filedirname(filename)
local path = gsub(dname,"^*/","")
path = gsub(path,"*",".*")
path = gsub(path,"-","%%-")
@@ -10093,24 +10752,24 @@ end
function resolvers.load(option)
statistics.starttiming(instance)
- resolvers.resetconfig()
- resolvers.identify_cnf()
- resolvers.load_lua() -- will become the new method
- resolvers.expand_variables()
- resolvers.load_cnf() -- will be skipped when we have a lua file
+ identify_configuration_files()
+ load_configuration_files()
+ collapse_configuration_data()
resolvers.expand_variables()
if option ~= "nofiles" then
- resolvers.load_hash()
+ load_databases()
resolvers.automount()
end
statistics.stoptiming(instance)
+ local files = instance.files
+ return files and next(files) and true
end
function resolvers.for_files(command, files, filetype, mustexist)
if files and #files > 0 then
local function report(str)
if trace_locating then
- logs.report("fileio",str) -- has already verbose
+ report_resolvers(str) -- has already verbose
else
print(str)
end
@@ -10158,51 +10817,6 @@ function resolvers.register_file(files, name, path)
end
end
-function resolvers.splitmethod(filename)
- if not filename then
- return { } -- safeguard
- elseif type(filename) == "table" then
- return filename -- already split
- elseif not find(filename,"://") then
- return { scheme="file", path = filename, original=filename } -- quick hack
- else
- return url.hashed(filename)
- end
-end
-
-function table.sequenced(t,sep) -- temp here
- local s = { }
- for k, v in next, t do -- indexed?
- s[#s+1] = k .. "=" .. tostring(v)
- end
- return concat(s, sep or " | ")
-end
-
-function resolvers.methodhandler(what, filename, filetype) -- ...
- filename = file.collapse_path(filename)
- local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
- local scheme = specification.scheme
- if resolvers[what][scheme] then
- if trace_locating then
- logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
- end
- return resolvers[what][scheme](filename,filetype) -- todo: specification
- else
- return resolvers[what].tex(filename,filetype) -- todo: specification
- end
-end
-
-function resolvers.clean_path(str)
- if str then
- str = gsub(str,"\\","/")
- str = gsub(str,"^!+","")
- str = gsub(str,"^~",resolvers.homedir)
- return str
- else
- return nil
- end
-end
-
function resolvers.do_with_path(name,func)
local pathlist = resolvers.expanded_path_list(name)
for i=1,#pathlist do
@@ -10214,45 +10828,13 @@ function resolvers.do_with_var(name,func)
func(expanded_var(name))
end
-function resolvers.with_files(pattern,handle)
- local hashes = instance.hashes
- for i=1,#hashes do
- local hash = hashes[i]
- local blobpath = hash.tag
- local blobtype = hash.type
- if blobpath then
- local files = instance.files[blobpath]
- if files then
- for k,v in next, files do
- if find(k,"^remap:") then
- k = files[k]
- v = files[k] -- chained
- end
- if find(k,pattern) then
- if type(v) == "string" then
- handle(blobtype,blobpath,v,k)
- else
- for _,vv in next, v do -- indexed
- handle(blobtype,blobpath,vv,k)
- end
- end
- end
- end
- end
- end
- end
-end
-
function resolvers.locate_format(name)
- local barename, fmtname = gsub(name,"%.%a+$",""), ""
- if resolvers.usecache then
- local path = file.join(caches.setpath("formats")) -- maybe platform
- fmtname = file.join(path,barename..".fmt") or ""
- end
+ local barename = gsub(name,"%.%a+$","")
+ local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
if fmtname == "" then
fmtname = resolvers.find_files(barename..".fmt")[1] or ""
+ fmtname = resolvers.clean_path(fmtname)
end
- fmtname = resolvers.clean_path(fmtname)
if fmtname ~= "" then
local barename = file.removesuffix(fmtname)
local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
@@ -10277,196 +10859,48 @@ function resolvers.boolean_variable(str,default)
end
end
-texconfig.kpse_init = false
-
-kpse = { original = kpse } setmetatable(kpse, { __index = function(k,v) return resolvers[v] end } )
-
--- for a while
-
-input = resolvers
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-tmp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>This module deals with caching data. It sets up the paths and
-implements loaders and savers for tables. It is best to set the
-following variable. When it is not set, the usual paths will be
-checked. Personally I prefer the (user's) temporary path.</p>
-
-<code>
-TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
-</code>
-
-<p>Currently we do no locking when we write files. This is no real
-problem because most caching involves fonts and the chance of them
-being written at the same time is small. We also need to extend
-luatools with a recache feature.</p>
---ldx]]--
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet
-
-caches = caches or { }
-
-caches.path = caches.path or nil
-caches.base = caches.base or "luatex-cache"
-caches.more = caches.more or "context"
-caches.direct = false -- true is faster but may need huge amounts of memory
-caches.tree = false
-caches.paths = caches.paths or nil
-caches.force = false
-caches.defaults = { "TEXMFCACHE", "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
-
-function caches.temp()
- local cachepath = nil
- local function check(list,isenv)
- if not cachepath then
- for k=1,#list do
- local v = list[k]
- cachepath = (isenv and (os.env[v] or "")) or v or ""
- if cachepath == "" then
- -- next
- else
- cachepath = resolvers.clean_path(cachepath)
- if lfs.isdir(cachepath) and file.iswritable(cachepath) then -- lfs.attributes(cachepath,"mode") == "directory"
- break
- elseif caches.force or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
- dir.mkdirs(cachepath)
- if lfs.isdir(cachepath) and file.iswritable(cachepath) then
- break
+function resolvers.with_files(pattern,handle,before,after) -- can be a nice iterator instead
+ local instance = resolvers.instance
+ local hashes = instance.hashes
+ for i=1,#hashes do
+ local hash = hashes[i]
+ local blobtype = hash.type
+ local blobpath = hash.tag
+ if blobpath then
+ if before then
+ before(blobtype,blobpath,pattern)
+ end
+ local files = instance.files[blobpath]
+ local total, checked, done = 0, 0, 0
+ if files then
+ for k,v in next, files do
+ total = total + 1
+ if find(k,"^remap:") then
+ k = files[k]
+ v = k -- files[k] -- chained
+ end
+ if find(k,pattern) then
+ if type(v) == "string" then
+ checked = checked + 1
+ if handle(blobtype,blobpath,v,k) then
+ done = done + 1
+ end
+ else
+ checked = checked + #v
+ for i=1,#v do
+ if handle(blobtype,blobpath,v[i],k) then
+ done = done + 1
+ end
+ end
end
end
end
- cachepath = nil
+ end
+ if after then
+ after(blobtype,blobpath,pattern,total,checked,done)
end
end
end
- check(resolvers.clean_path_list("TEXMFCACHE") or { })
- check(caches.defaults,true)
- if not cachepath then
- print("\nfatal error: there is no valid (writable) cache path defined\n")
- os.exit()
- elseif not lfs.isdir(cachepath) then -- lfs.attributes(cachepath,"mode") ~= "directory"
- print(format("\nfatal error: cache path %s is not a directory\n",cachepath))
- os.exit()
- end
- cachepath = file.collapse_path(cachepath)
- function caches.temp()
- return cachepath
- end
- return cachepath
-end
-
-function caches.configpath()
- return table.concat(resolvers.instance.cnffiles,";")
-end
-
-function caches.hashed(tree)
- return md5.hex(gsub(lower(tree),"[\\\/]+","/"))
-end
-
-function caches.treehash()
- local tree = caches.configpath()
- if not tree or tree == "" then
- return false
- else
- return caches.hashed(tree)
- end
-end
-
-function caches.setpath(...)
- if not caches.path then
- if not caches.path then
- caches.path = caches.temp()
- end
- caches.path = resolvers.clean_path(caches.path) -- to be sure
- caches.tree = caches.tree or caches.treehash()
- if caches.tree then
- caches.path = dir.mkdirs(caches.path,caches.base,caches.more,caches.tree)
- else
- caches.path = dir.mkdirs(caches.path,caches.base,caches.more)
- end
- end
- if not caches.path then
- caches.path = '.'
- end
- caches.path = resolvers.clean_path(caches.path)
- local dirs = { ... }
- if #dirs > 0 then
- local pth = dir.mkdirs(caches.path,...)
- return pth
- end
- caches.path = dir.expand_name(caches.path)
- return caches.path
-end
-
-function caches.definepath(category,subcategory)
- return function()
- return caches.setpath(category,subcategory)
- end
-end
-
-function caches.setluanames(path,name)
- return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
-end
-
-function caches.loaddata(path,name)
- local tmaname, tmcname = caches.setluanames(path,name)
- local loader = loadfile(tmcname) or loadfile(tmaname)
- if loader then
- loader = loader()
- collectgarbage("step")
- return loader
- else
- return false
- end
-end
-
---~ function caches.loaddata(path,name)
---~ local tmaname, tmcname = caches.setluanames(path,name)
---~ return dofile(tmcname) or dofile(tmaname)
---~ end
-
-function caches.iswritable(filepath,filename)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- return file.iswritable(tmaname)
-end
-
-function caches.savedata(filepath,filename,data,raw)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- local reduce, simplify = true, true
- if raw then
- reduce, simplify = false, false
- end
- data.cache_uuid = os.uuid()
- if caches.direct then
- file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex
- else
- table.tofile(tmaname, data,'return',false,true,false) -- maybe not the last true
- end
- local cleanup = resolvers.boolean_variable("PURGECACHE", false)
- local strip = resolvers.boolean_variable("LUACSTRIP", true)
- utils.lua.compile(tmaname, tmcname, cleanup, strip)
-end
-
--- here we use the cache for format loading (texconfig.[formatname|jobname])
-
---~ if tex and texconfig and texconfig.formatname and texconfig.formatname == "" then
-if tex and texconfig and (not texconfig.formatname or texconfig.formatname == "") and input and resolvers.instance then
- if not texconfig.luaname then texconfig.luaname = "cont-en.lua" end -- or luc
- texconfig.formatname = caches.setpath("formats") .. "/" .. gsub(texconfig.luaname,"%.lu.$",".fmt")
end
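-- A hedged usage sketch for the new resolvers.with_files above, assuming the
-- databases have already been loaded (resolvers.load()); the pattern and the
-- callbacks are illustrative only.  The handler gets type, tree, path(s) and
-- key for every matching file name, and its return value feeds the 'done'
-- counter passed to the optional 'after' callback.
local hits = 0
resolvers.with_files("%.mkiv$",
    function(kind, tree, path, name) -- handle: called per match
        hits = hits + 1
        return true                  -- truthy return counts as 'done'
    end,
    nil,                             -- before: not needed here
    function(kind, tree, pattern, total, checked, done)
        print(tree, total, checked, done)
    end
)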
@@ -10474,7 +10908,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-res'] = {
+if not modules then modules = { } end modules ['data-pre'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -10482,14 +10916,15 @@ if not modules then modules = { } end modules ['data-res'] = {
license = "see context related readme files"
}
---~ print(resolvers.resolve("abc env:tmp file:cont-en.tex path:cont-en.tex full:cont-en.tex rel:zapf/one/p-chars.tex"))
local upper, lower, gsub = string.upper, string.lower, string.gsub
local prefixes = { }
-prefixes.environment = function(str)
- return resolvers.clean_path(os.getenv(str) or os.getenv(upper(str)) or os.getenv(lower(str)) or "")
+local getenv = resolvers.getenv
+
+prefixes.environment = function(str) -- getenv is case insensitive anyway
+ return resolvers.clean_path(getenv(str) or getenv(upper(str)) or getenv(lower(str)) or "")
end
prefixes.relative = function(str,n)
@@ -10627,7 +11062,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-con'] = {
- version = 1.001,
+ version = 1.100,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
@@ -10657,46 +11092,58 @@ containers = containers or { }
containers.usecache = true
+local report_cache = logs.new("cache")
+
local function report(container,tag,name)
if trace_cache or trace_containers then
- logs.report(format("%s cache",container.subcategory),"%s: %s",tag,name or 'invalid')
+ report_cache("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
end
end
local allocated = { }
--- tracing
+local mt = {
+ __index = function(t,k)
+ if k == "writable" then
+ local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable = writable
+ return writable
+ elseif k == "readables" then
+ local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables = readables
+ return readables
+ end
+ end
+}
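-- A minimal standalone sketch of the lazy metatable idiom used above: the
-- field is computed on first access, cached on the table itself, and served
-- as a plain field afterwards.  'lazy_field' and 'compute' are illustrative
-- names, not part of the containers code.
local function lazy_field(name, compute)
    return setmetatable({ }, {
        __index = function(t, k)
            if k == name then
                local v = compute(t)
                t[k] = v -- cached: later accesses no longer trigger __index
                return v
            end
        end
    })
end

-- local c = lazy_field("writable", function() return { "." } end)
-- print(c.writable[1]) -- computed on first access, read directly afterwards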
function containers.define(category, subcategory, version, enabled)
- return function()
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or 1.000,
- trace = false,
- path = caches and caches.setpath and caches.setpath(category,subcategory),
- }
- c[subcategory] = s
- end
- return s
- else
- return nil
+ if category and subcategory then
+ local c = allocated[category]
+ if not c then
+ c = { }
+ allocated[category] = c
+ end
+ local s = c[subcategory]
+ if not s then
+ s = {
+ category = category,
+ subcategory = subcategory,
+ storage = { },
+ enabled = enabled,
+ version = version or math.pi, -- after all, this is TeX
+ trace = false,
+ -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
+ -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
+ }
+ setmetatable(s,mt)
+ c[subcategory] = s
end
+ return s
end
end
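-- The writable and readables fields are now resolved lazily: the first access
-- falls through to the __index handler above, which computes the value and
-- stores it in the table, so later lookups never touch the metatable again.
-- A minimal sketch of that caching pattern (the field name is purely illustrative):
local lazy = setmetatable({ }, {
    __index = function(t,k)
        if k == "stamp" then
            local v = os.time() -- stand-in for an expensive lookup
            t.stamp = v         -- cache it in the table itself
            return v
        end
    end
})
print(lazy.stamp == lazy.stamp) -- true, computed only once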
function containers.is_usable(container, name)
- return container.enabled and caches and caches.iswritable(container.path, name)
+ return container.enabled and caches and caches.iswritable(container.writable, name)
end
function containers.is_valid(container, name)
@@ -10709,18 +11156,20 @@ function containers.is_valid(container, name)
end
function containers.read(container,name)
- if container.enabled and caches and not container.storage[name] and containers.usecache then
- container.storage[name] = caches.loaddata(container.path,name)
- if containers.is_valid(container,name) then
+ local storage = container.storage
+ local stored = storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored = caches.loaddata(container.readables,name)
+ if stored and stored.cache_version == container.version then
report(container,"loaded",name)
else
- container.storage[name] = nil
+ stored = nil
end
- end
- if container.storage[name] then
+ storage[name] = stored
+ elseif stored then
report(container,"reusing",name)
end
- return container.storage[name]
+ return stored
end
function containers.write(container, name, data)
@@ -10729,7 +11178,7 @@ function containers.write(container, name, data)
if container.enabled and caches then
local unique, shared = data.unique, data.shared
data.unique, data.shared = nil, nil
- caches.savedata(container.path, name, data)
+ caches.savedata(container.writable, name, data)
report(container,"saved",name)
data.unique, data.shared = unique, shared
end
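-- A hypothetical use of the reworked api: define a container (which now returns
-- the table directly instead of a constructor), read from the cache, and only
-- recompute plus write on a miss. The category, subcategory and key names are
-- made up; whether write stamps cache_version itself is not shown here, so the
-- sketch sets it explicitly.
local democache = containers.define("demo", "examples", 1.001, true)
local data = containers.read(democache, "sample")
if not data then
    data = { cache_version = democache.version, values = { 1, 2, 3 } }
    containers.write(democache, "sample", data)
end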
@@ -10764,41 +11213,7 @@ local format, lower, gsub, find = string.format, string.lower, string.gsub, stri
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
--- since we want to use the cache instead of the tree, we will now
--- reimplement the saver.
-
-local save_data = resolvers.save_data
-local load_data = resolvers.load_data
-
-resolvers.cachepath = nil -- public, for tracing
-resolvers.usecache = true -- public, for tracing
-
-function resolvers.save_data(dataname)
- save_data(dataname, function(cachename,dataname)
- resolvers.usecache = not toboolean(resolvers.expansion("CACHEINTDS") or "false",true)
- if resolvers.usecache then
- resolvers.cachepath = resolvers.cachepath or caches.definepath("trees")
- return file.join(resolvers.cachepath(),caches.hashed(cachename))
- else
- return file.join(cachename,dataname)
- end
- end)
-end
-
-function resolvers.load_data(pathname,dataname,filename)
- load_data(pathname,dataname,filename,function(dataname,filename)
- resolvers.usecache = not toboolean(resolvers.expansion("CACHEINTDS") or "false",true)
- if resolvers.usecache then
- resolvers.cachepath = resolvers.cachepath or caches.definepath("trees")
- return file.join(resolvers.cachepath(),caches.hashed(pathname))
- else
- if not filename or (filename == "") then
- filename = dataname
- end
- return file.join(pathname,filename)
- end
- end)
-end
+local report_resolvers = logs.new("resolvers")
-- we will make a better format, maybe something xml or just text or lua
@@ -10807,7 +11222,7 @@ resolvers.automounted = resolvers.automounted or { }
function resolvers.automount(usecache)
local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT'))
if (not mountpaths or #mountpaths == 0) and usecache then
- mountpaths = { caches.setpath("mount") }
+ mountpaths = caches.getreadablepaths("mount")
end
if mountpaths and #mountpaths > 0 then
statistics.starttiming(resolvers.instance)
@@ -10821,7 +11236,7 @@ function resolvers.automount(usecache)
-- skip
elseif find(line,"^zip://") then
if trace_locating then
- logs.report("fileio","mounting %s",line)
+ report_resolvers("mounting %s",line)
end
table.insert(resolvers.automounted,line)
resolvers.usezipfile(line)
@@ -10837,8 +11252,8 @@ end
-- status info
-statistics.register("used config path", function() return caches.configpath() end)
-statistics.register("used cache path", function() return caches.temp() or "?" end)
+statistics.register("used config file", function() return caches.configfiles() end)
+statistics.register("used cache path", function() return caches.usedpaths() end)
-- experiment (code will move)
@@ -10866,11 +11281,11 @@ function statistics.check_fmt_status(texname)
local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
local luvbanner = luv.enginebanner or "?"
if luvbanner ~= enginebanner then
- return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
+ return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
end
local luvhash = luv.sourcehash or "?"
if luvhash ~= sourcehash then
- return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
+ return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
end
else
return "invalid status file"
@@ -10900,6 +11315,8 @@ local unpack = unpack or table.unpack
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
-- zip:///oeps.zip?name=bla/bla.tex
-- zip:///oeps.zip?tree=tex/texmf-local
-- zip:///texmf.zip?tree=/tex/texmf
@@ -10950,16 +11367,16 @@ function locators.zip(specification) -- where is this used? startup zips (untest
local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree
if trace_locating then
if zfile then
- logs.report("fileio","zip locator, archive '%s' found",specification.original)
+ report_resolvers("zip locator, archive '%s' found",specification.original)
else
- logs.report("fileio","zip locator, archive '%s' not found",specification.original)
+ report_resolvers("zip locator, archive '%s' not found",specification.original)
end
end
end
function hashers.zip(tag,name)
if trace_locating then
- logs.report("fileio","loading zip file '%s' as '%s'",name,tag)
+ report_resolvers("loading zip file '%s' as '%s'",name,tag)
end
resolvers.usezipfile(format("%s?tree=%s",tag,name))
end
@@ -10984,25 +11401,25 @@ function finders.zip(specification,filetype)
local zfile = zip.openarchive(specification.path)
if zfile then
if trace_locating then
- logs.report("fileio","zip finder, archive '%s' found",specification.path)
+ report_resolvers("zip finder, archive '%s' found",specification.path)
end
local dfile = zfile:open(q.name)
if dfile then
dfile = zfile:close()
if trace_locating then
- logs.report("fileio","zip finder, file '%s' found",q.name)
+ report_resolvers("zip finder, file '%s' found",q.name)
end
return specification.original
elseif trace_locating then
- logs.report("fileio","zip finder, file '%s' not found",q.name)
+ report_resolvers("zip finder, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio","zip finder, unknown archive '%s'",specification.path)
+ report_resolvers("zip finder, unknown archive '%s'",specification.path)
end
end
end
if trace_locating then
- logs.report("fileio","zip finder, '%s' not found",filename)
+ report_resolvers("zip finder, '%s' not found",filename)
end
return unpack(finders.notfound)
end
@@ -11015,25 +11432,25 @@ function openers.zip(specification)
local zfile = zip.openarchive(zipspecification.path)
if zfile then
if trace_locating then
- logs.report("fileio","zip opener, archive '%s' opened",zipspecification.path)
+ report_resolvers("zip opener, archive '%s' opened",zipspecification.path)
end
local dfile = zfile:open(q.name)
if dfile then
logs.show_open(specification)
if trace_locating then
- logs.report("fileio","zip opener, file '%s' found",q.name)
+ report_resolvers("zip opener, file '%s' found",q.name)
end
return openers.text_opener(specification,dfile,'zip')
elseif trace_locating then
- logs.report("fileio","zip opener, file '%s' not found",q.name)
+ report_resolvers("zip opener, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio","zip opener, unknown archive '%s'",zipspecification.path)
+ report_resolvers("zip opener, unknown archive '%s'",zipspecification.path)
end
end
end
if trace_locating then
- logs.report("fileio","zip opener, '%s' not found",filename)
+ report_resolvers("zip opener, '%s' not found",filename)
end
return unpack(openers.notfound)
end
@@ -11046,27 +11463,27 @@ function loaders.zip(specification)
local zfile = zip.openarchive(specification.path)
if zfile then
if trace_locating then
- logs.report("fileio","zip loader, archive '%s' opened",specification.path)
+ report_resolvers("zip loader, archive '%s' opened",specification.path)
end
local dfile = zfile:open(q.name)
if dfile then
logs.show_load(filename)
if trace_locating then
- logs.report("fileio","zip loader, file '%s' loaded",filename)
+ report_resolvers("zip loader, file '%s' loaded",filename)
end
local s = dfile:read("*all")
dfile:close()
return true, s, #s
elseif trace_locating then
- logs.report("fileio","zip loader, file '%s' not found",q.name)
+ report_resolvers("zip loader, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio","zip loader, unknown archive '%s'",specification.path)
+ report_resolvers("zip loader, unknown archive '%s'",specification.path)
end
end
end
if trace_locating then
- logs.report("fileio","zip loader, '%s' not found",filename)
+ report_resolvers("zip loader, '%s' not found",filename)
end
return unpack(openers.notfound)
end
@@ -11084,7 +11501,7 @@ function resolvers.usezipfile(zipname)
if z then
local instance = resolvers.instance
if trace_locating then
- logs.report("fileio","zip registering, registering archive '%s'",zipname)
+ report_resolvers("zip registering, registering archive '%s'",zipname)
end
statistics.starttiming(instance)
resolvers.prepend_hash('zip',zipname,zipfile)
@@ -11093,10 +11510,10 @@ function resolvers.usezipfile(zipname)
instance.files[zipname] = resolvers.register_zip_file(z,tree or "")
statistics.stoptiming(instance)
elseif trace_locating then
- logs.report("fileio","zip registering, unknown archive '%s'",zipname)
+ report_resolvers("zip registering, unknown archive '%s'",zipname)
end
elseif trace_locating then
- logs.report("fileio","zip registering, '%s' not found",zipname)
+ report_resolvers("zip registering, '%s' not found",zipname)
end
end
@@ -11108,7 +11525,7 @@ function resolvers.register_zip_file(z,tree)
filter = format("^%s/(.+)/(.-)$",tree)
end
if trace_locating then
- logs.report("fileio","zip registering, using filter '%s'",filter)
+ report_resolvers("zip registering, using filter '%s'",filter)
end
local register, n = resolvers.register_file, 0
for i in z:files() do
@@ -11125,7 +11542,7 @@ function resolvers.register_zip_file(z,tree)
n = n + 1
end
end
- logs.report("fileio","zip registering, %s files registered",n)
+ report_resolvers("zip registering, %s files registered",n)
return files
end
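-- A hypothetical illustration of the zip urls handled above (the archive name
-- is made up): registering a tree that lives inside a zip file makes its
-- content reachable through the normal resolver lookups afterwards.
resolvers.usezipfile("zip:///texmf-extra.zip?tree=/tex/texmf")
-- subsequent lookups then also consider the registered archive, e.g.
-- resolvers.find_file("somestyle.tex")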
@@ -11134,6 +11551,93 @@ end -- of closure
do -- create closure to overcome 200 locals limit
+if not modules then modules = { } end modules ['data-tre'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- \input tree://oeps1/**/oeps.tex
+
+local find, gsub, format = string.find, string.gsub, string.format
+local unpack = unpack or table.unpack
+
+local report_resolvers = logs.new("resolvers")
+
+local done, found, notfound = { }, { }, resolvers.finders.notfound
+
+function resolvers.finders.tree(specification,filetype)
+ local fnd = found[specification]
+ if not fnd then
+ local spec = resolvers.splitmethod(specification).path or ""
+ if spec ~= "" then
+ local path, name = file.dirname(spec), file.basename(spec)
+ if path == "" then path = "." end
+ local hash = done[path]
+ if not hash then
+ local pattern = path .. "/*" -- we will use the proper splitter
+ hash = dir.glob(pattern)
+ done[path] = hash
+ end
+ local pattern = "/" .. gsub(name,"([%.%-%+])", "%%%1") .. "$"
+ for k=1,#hash do
+ local v = hash[k]
+ if find(v,pattern) then
+ found[specification] = v
+ return v
+ end
+ end
+ end
+ fnd = unpack(notfound) -- unpack ? why not just notfound[1]
+ found[specification] = fnd
+ end
+ return fnd
+end
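-- The tree finder above globs the requested path once, remembers the listing
-- in 'done', and then matches candidates against the file name with dots,
-- dashes and pluses escaped. A standalone sketch of that escaping step (the
-- file names are illustrative):
local name    = "my-style.v2.tex"
local pattern = "/" .. string.gsub(name, "([%.%-%+])", "%%%1") .. "$"
print(pattern)                                                  -- /my%-style%.v2%.tex$
print(string.find("/data/tex/my-style.v2.tex", pattern) ~= nil) -- true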
+
+function resolvers.locators.tree(specification)
+ local spec = resolvers.splitmethod(specification)
+ local path = spec.path
+ if path ~= '' and lfs.isdir(path) then
+ if trace_locating then
+ report_resolvers("tree locator '%s' found (%s)",path,specification)
+ end
+ resolvers.append_hash('tree',specification,path,false) -- don't cache
+ elseif trace_locating then
+ report_resolvers("tree locator '%s' not found",path)
+ end
+end
+
+function resolvers.hashers.tree(tag,name)
+ if trace_locating then
+ report_resolvers("analysing tree '%s' as '%s'",name,tag)
+ end
+ -- todo: maybe share with done above
+ local spec = resolvers.splitmethod(tag)
+ local path = spec.path
+ resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
+end
+
+function resolvers.generators.tree(tag)
+ local spec = resolvers.splitmethod(tag)
+ local path = spec.path
+ resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
+end
+
+function resolvers.concatinators.tree(tag,path,name)
+ return file.join(tag,path,name)
+end
+
+resolvers.isreadable.tree = file.isreadable
+resolvers.openers.tree = resolvers.openers.generic
+resolvers.loaders.tree = resolvers.loaders.generic
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
if not modules then modules = { } end modules ['data-crl'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -11142,32 +11646,31 @@ if not modules then modules = { } end modules ['data-crl'] = {
license = "see context related readme files"
}
-local gsub = string.gsub
+-- this one is replaced by data-sch.lua --
curl = curl or { }
-curl.cached = { }
-curl.cachepath = caches.definepath("curl")
-
+local gsub = string.gsub
local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-function curl.fetch(protocol, name)
- local cachename = curl.cachepath() .. "/" .. gsub(name,"[^%a%d%.]+","-")
--- cachename = gsub(cachename,"[\\/]", io.fileseparator)
- cachename = gsub(cachename,"[\\]", "/") -- cleanup
- if not curl.cached[name] then
+local cached = { }
+
+function curl.fetch(protocol, name) -- todo: use socket library
+ local cleanname = gsub(name,"[^%a%d%.]+","-")
+ local cachename = caches.setfirstwritablefile(cleanname,"curl")
+ if not cached[name] then
if not io.exists(cachename) then
- curl.cached[name] = cachename
+ cached[name] = cachename
local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name -- no protocol .. "://"
os.spawn(command)
end
if io.exists(cachename) then
- curl.cached[name] = cachename
+ cached[name] = cachename
else
- curl.cached[name] = ""
+ cached[name] = ""
end
end
- return curl.cached[name]
+ return cached[name]
end
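-- The fetcher above keys its cache on a sanitized name: everything that is not
-- a letter, digit or dot collapses to a single dash. A quick illustration with
-- a made-up url:
local url = "http://example.org/manuals/demo.pdf"
print((string.gsub(url, "[^%a%d%.]+", "-")))
-- http-example.org-manuals-demo.pdf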
function finders.curl(protocol,filename)
@@ -11214,6 +11717,8 @@ if not modules then modules = { } end modules ['data-lua'] = {
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
local gsub, insert = string.gsub, table.insert
local unpack = unpack or table.unpack
@@ -11242,7 +11747,7 @@ local function thepath(...)
local t = { ... } t[#t+1] = "?.lua"
local path = file.join(unpack(t))
if trace_locating then
- logs.report("fileio","! appending '%s' to 'package.path'",path)
+ report_resolvers("! appending '%s' to 'package.path'",path)
end
return path
end
@@ -11264,11 +11769,11 @@ local function loaded(libpaths,name,simple)
local libpath = libpaths[i]
local resolved = gsub(libpath,"%?",simple)
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
+ report_resolvers("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
end
if resolvers.isreadable.file(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'package.path': '%s'",name,resolved)
+ report_resolvers("! lib '%s' located via 'package.path': '%s'",name,resolved)
end
return loadfile(resolved)
end
@@ -11278,17 +11783,17 @@ end
package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- mode detail
- logs.report("fileio","! locating '%s'",name)
+ report_resolvers("! locating '%s'",name)
end
for i=1,#libformats do
local format = libformats[i]
local resolved = resolvers.find_file(name,format) or ""
if trace_locating then -- mode detail
- logs.report("fileio","! checking for '%s' using 'libformat path': '%s'",name,format)
+ report_resolvers("! checking for '%s' using 'libformat path': '%s'",name,format)
end
if resolved ~= "" then
if trace_locating then
- logs.report("fileio","! lib '%s' located via environment: '%s'",name,resolved)
+ report_resolvers("! lib '%s' located via environment: '%s'",name,resolved)
end
return loadfile(resolved)
end
@@ -11311,11 +11816,11 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
local path = paths[p]
local resolved = file.join(path,libname)
if trace_locating then -- mode detail
- logs.report("fileio","! checking for '%s' using 'clibformat path': '%s'",libname,path)
+ report_resolvers("! checking for '%s' using 'clibformat path': '%s'",libname,path)
end
if resolvers.isreadable.file(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'clibformat': '%s'",libname,resolved)
+ report_resolvers("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
end
return package.loadlib(resolved,name)
end
@@ -11325,28 +11830,28 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
local libpath = clibpaths[i]
local resolved = gsub(libpath,"?",simple)
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
+ report_resolvers("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
end
if resolvers.isreadable.file(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'package.cpath': '%s'",name,resolved)
+ report_resolvers("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
end
return package.loadlib(resolved,name)
end
end
-- just in case the distribution is messed up
if trace_loading then -- more detail
- logs.report("fileio","! checking for '%s' using 'luatexlibs': '%s'",name)
+ report_resolvers("! checking for '%s' using 'luatexlibs': '%s'",name)
end
local resolved = resolvers.find_file(file.basename(name),'luatexlibs') or ""
if resolved ~= "" then
if trace_locating then
- logs.report("fileio","! lib '%s' located by basename via environment: '%s'",name,resolved)
+ report_resolvers("! lib '%s' located by basename via environment: '%s'",name,resolved)
end
return loadfile(resolved)
end
if trace_locating then
- logs.report("fileio",'? unable to locate lib: %s',name)
+ report_resolvers('? unable to locate lib: %s',name)
end
-- return "unable to locate " .. name
end
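-- The replacement of package.loaders[2] above hooks require() into the
-- resolvers. A minimal sketch of the same hook mechanism with a dummy searcher
-- that wraps whatever loader is currently installed (the module name is made up):
local previous = package.loaders[2]
package.loaders[2] = function(name)
    if name == "demo-module" then
        return function() return { answer = 42 } end -- loader for our fake module
    end
    return previous(name) -- anything else follows the normal route
end
print(require("demo-module").answer) -- 42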
@@ -11358,113 +11863,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-kps'] = {
- version = 1.001,
- comment = "companion to luatools.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>This file is used when we want the input handlers to behave like
-<type>kpsewhich</type>. What to do with the following:</p>
-
-<typing>
-{$SELFAUTOLOC,$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}
-$SELFAUTOLOC : /usr/tex/bin/platform
-$SELFAUTODIR : /usr/tex/bin
-$SELFAUTOPARENT : /usr/tex
-</typing>
-
-<p>How about just forgetting about them?</p>
---ldx]]--
-
-local suffixes = resolvers.suffixes
-local formats = resolvers.formats
-
-suffixes['gf'] = { '<resolution>gf' }
-suffixes['pk'] = { '<resolution>pk' }
-suffixes['base'] = { 'base' }
-suffixes['bib'] = { 'bib' }
-suffixes['bst'] = { 'bst' }
-suffixes['cnf'] = { 'cnf' }
-suffixes['mem'] = { 'mem' }
-suffixes['mf'] = { 'mf' }
-suffixes['mfpool'] = { 'pool' }
-suffixes['mft'] = { 'mft' }
-suffixes['mppool'] = { 'pool' }
-suffixes['graphic/figure'] = { 'eps', 'epsi' }
-suffixes['texpool'] = { 'pool' }
-suffixes['PostScript header'] = { 'pro' }
-suffixes['ist'] = { 'ist' }
-suffixes['web'] = { 'web', 'ch' }
-suffixes['cweb'] = { 'w', 'web', 'ch' }
-suffixes['cmap files'] = { 'cmap' }
-suffixes['lig files'] = { 'lig' }
-suffixes['bitmap font'] = { }
-suffixes['MetaPost support'] = { }
-suffixes['TeX system documentation'] = { }
-suffixes['TeX system sources'] = { }
-suffixes['dvips config'] = { }
-suffixes['type42 fonts'] = { }
-suffixes['web2c files'] = { }
-suffixes['other text files'] = { }
-suffixes['other binary files'] = { }
-suffixes['opentype fonts'] = { 'otf' }
-
-suffixes['fmt'] = { 'fmt' }
-suffixes['texmfscripts'] = { 'rb','lua','py','pl' }
-
-suffixes['pdftex config'] = { }
-suffixes['Troff fonts'] = { }
-
-suffixes['ls-R'] = { }
-
---[[ldx--
-<p>If you wondered abou tsome of the previous mappings, how about
-the next bunch:</p>
---ldx]]--
-
-formats['bib'] = ''
-formats['bst'] = ''
-formats['mft'] = ''
-formats['ist'] = ''
-formats['web'] = ''
-formats['cweb'] = ''
-formats['MetaPost support'] = ''
-formats['TeX system documentation'] = ''
-formats['TeX system sources'] = ''
-formats['Troff fonts'] = ''
-formats['dvips config'] = ''
-formats['graphic/figure'] = ''
-formats['ls-R'] = ''
-formats['other text files'] = ''
-formats['other binary files'] = ''
-
-formats['gf'] = ''
-formats['pk'] = ''
-formats['base'] = 'MFBASES'
-formats['cnf'] = ''
-formats['mem'] = 'MPMEMS'
-formats['mf'] = 'MFINPUTS'
-formats['mfpool'] = 'MFPOOL'
-formats['mppool'] = 'MPPOOL'
-formats['texpool'] = 'TEXPOOL'
-formats['PostScript header'] = 'TEXPSHEADERS'
-formats['cmap files'] = 'CMAPFONTS'
-formats['type42 fonts'] = 'T42FONTS'
-formats['web2c files'] = 'WEB2C'
-formats['pdftex config'] = 'PDFTEXCONFIG'
-formats['texmfscripts'] = 'TEXMFSCRIPTS'
-formats['bitmap font'] = ''
-formats['lig files'] = 'LIGFONTS'
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['data-aux'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -11474,49 +11872,52 @@ if not modules then modules = { } end modules ['data-aux'] = {
}
local find = string.find
+local type, next = type, next
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix
local scriptpath = "scripts/context/lua"
newname = file.addsuffix(newname,"lua")
local oldscript = resolvers.clean_path(oldname)
if trace_locating then
- logs.report("fileio","to be replaced old script %s", oldscript)
+ report_resolvers("to be replaced old script %s", oldscript)
end
local newscripts = resolvers.find_files(newname) or { }
if #newscripts == 0 then
if trace_locating then
- logs.report("fileio","unable to locate new script")
+ report_resolvers("unable to locate new script")
end
else
for i=1,#newscripts do
local newscript = resolvers.clean_path(newscripts[i])
if trace_locating then
- logs.report("fileio","checking new script %s", newscript)
+ report_resolvers("checking new script %s", newscript)
end
if oldscript == newscript then
if trace_locating then
- logs.report("fileio","old and new script are the same")
+ report_resolvers("old and new script are the same")
end
elseif not find(newscript,scriptpath) then
if trace_locating then
- logs.report("fileio","new script should come from %s",scriptpath)
+ report_resolvers("new script should come from %s",scriptpath)
end
elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
if trace_locating then
- logs.report("fileio","invalid new script name")
+ report_resolvers("invalid new script name")
end
else
local newdata = io.loaddata(newscript)
if newdata then
if trace_locating then
- logs.report("fileio","old script content replaced by new content")
+ report_resolvers("old script content replaced by new content")
end
io.savedata(oldscript,newdata)
break
elseif trace_locating then
- logs.report("fileio","unable to load new script")
+ report_resolvers("unable to load new script")
end
end
end
@@ -11536,70 +11937,116 @@ if not modules then modules = { } end modules ['data-tmf'] = {
license = "see context related readme files"
}
-local find, gsub, match = string.find, string.gsub, string.match
-local getenv, setenv = os.getenv, os.setenv
+-- = <<
+-- ? ??
+-- < +=
+-- > =+
--- loads *.tmf files in minimal tree roots (to be optimized and documented)
+function resolvers.load_tree(tree)
+ if type(tree) == "string" and tree ~= "" then
-function resolvers.check_environment(tree)
- logs.simpleline()
- setenv('TMP', getenv('TMP') or getenv('TEMP') or getenv('TMPDIR') or getenv('HOME'))
- setenv('TEXOS', getenv('TEXOS') or ("texmf-" .. os.platform))
- setenv('TEXPATH', gsub(tree or "tex","\/+$",''))
- setenv('TEXMFOS', getenv('TEXPATH') .. "/" .. getenv('TEXOS'))
- logs.simpleline()
- logs.simple("preset : TEXPATH => %s", getenv('TEXPATH'))
- logs.simple("preset : TEXOS => %s", getenv('TEXOS'))
- logs.simple("preset : TEXMFOS => %s", getenv('TEXMFOS'))
- logs.simple("preset : TMP => %s", getenv('TMP'))
- logs.simple('')
-end
+ local getenv, setenv = resolvers.getenv, resolvers.setenv
-function resolvers.load_environment(name) -- todo: key=value as well as lua
- local f = io.open(name)
- if f then
- for line in f:lines() do
- if find(line,"^[%%%#]") then
- -- skip comment
- else
- local key, how, value = match(line,"^(.-)%s*([<=>%?]+)%s*(.*)%s*$")
- if how then
- value = gsub(value,"%%(.-)%%", function(v) return getenv(v) or "" end)
- if how == "=" or how == "<<" then
- setenv(key,value)
- elseif how == "?" or how == "??" then
- setenv(key,getenv(key) or value)
- elseif how == "<" or how == "+=" then
- if getenv(key) then
- setenv(key,getenv(key) .. io.fileseparator .. value)
- else
- setenv(key,value)
- end
- elseif how == ">" or how == "=+" then
- if getenv(key) then
- setenv(key,value .. io.pathseparator .. getenv(key))
- else
- setenv(key,value)
- end
- end
- end
- end
+ -- later might listen to the raw osenv var as well
+ local texos = "texmf-" .. os.platform
+
+ local oldroot = environment.texroot
+ local newroot = file.collapse_path(tree)
+
+ local newtree = file.join(newroot,texos)
+ local newpath = file.join(newtree,"bin")
+
+ if not lfs.isdir(newtree) then
+ logs.simple("no '%s' under tree %s",texos,tree)
+ os.exit()
end
- f:close()
+ if not lfs.isdir(newpath) then
+ logs.simple("no '%s/bin' under tree %s",texos,tree)
+ os.exit()
+ end
+
+ local texmfos = newtree
+
+ environment.texroot = newroot
+ environment.texos = texos
+ environment.texmfos = texmfos
+
+ setenv('SELFAUTOPARENT', newroot)
+ setenv('SELFAUTODIR', newtree)
+ setenv('SELFAUTOLOC', newpath)
+ setenv('TEXROOT', newroot)
+ setenv('TEXOS', texos)
+ setenv('TEXMFOS', texmfos)
+ setenv('TEXROOT', newroot)
+ setenv('TEXMFCNF', resolvers.luacnfspec)
+ setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+
+ logs.simple("changing from root '%s' to '%s'",oldroot,newroot)
+ logs.simple("prepending '%s' to binary path",newpath)
+ logs.simple()
end
end
-function resolvers.load_tree(tree)
- if tree and tree ~= "" then
- local setuptex = 'setuptex.tmf'
- if lfs.attributes(tree, "mode") == "directory" then -- check if not nil
- setuptex = tree .. "/" .. setuptex
- else
- setuptex = tree
+
+end -- of closure
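-- A hypothetical call to the tree loader defined above; the root is made up
-- and on a real system is the directory that contains texmf-<platform>/bin.
-- Later on mtxrun feeds this function from the --tree command line argument.
resolvers.load_tree("/opt/context-minimals")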
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-lst'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- used in mtxrun
+
+local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+
+resolvers.listers = resolvers.listers or { }
+
+local function tabstr(str)
+ if type(str) == 'table' then
+ return concat(str," | ")
+ else
+ return str
+ end
+end
+
+local function list(list,report,pattern)
+ pattern = pattern and pattern ~= "" and upper(pattern) or ""
+ local instance = resolvers.instance
+ local report = report or texio.write_nl
+ local sorted = table.sortedkeys(list)
+ for i=1,#sorted do
+ local key = sorted[i]
+ if pattern == "" or find(upper(key),pattern) then
+ report(format('%s %s=%s',instance.origins[key] or "---",key,tabstr(list[key])))
end
- if io.exists(setuptex) then
- resolvers.check_environment(tree)
- resolvers.load_environment(setuptex)
+ end
+end
+
+function resolvers.listers.variables (report,pattern) list(resolvers.instance.variables, report,pattern) end
+function resolvers.listers.expansions(report,pattern) list(resolvers.instance.expansions,report,pattern) end
+
+function resolvers.listers.configurations(report,pattern)
+ pattern = pattern and pattern ~= "" and upper(pattern) or ""
+ local report = report or texio.write_nl
+ local instance = resolvers.instance
+ local sorted = table.sortedkeys(instance.kpsevars)
+ for i=1,#sorted do
+ local key = sorted[i]
+ if pattern == "" or find(upper(key),pattern) then
+ report(format("%s\n",key))
+ local order = instance.order
+ for i=1,#order do
+ local str = order[i][key]
+ if str then
+ report(format("\t%s\t%s",i,str))
+ end
+ end
+ report("")
end
end
end
@@ -11708,111 +12155,140 @@ function states.get(key,default)
return states.get_by_tag(states.tag,key,default)
end
---~ states.data.update = {
---~ ["version"] = {
---~ ["major"] = 0,
---~ ["minor"] = 1,
---~ },
---~ ["rsync"] = {
---~ ["server"] = "contextgarden.net",
---~ ["module"] = "minimals",
---~ ["repository"] = "current",
---~ ["flags"] = "-rpztlv --stats",
---~ },
---~ ["tasks"] = {
---~ ["update"] = true,
---~ ["make"] = true,
---~ ["delete"] = false,
---~ },
---~ ["platform"] = {
---~ ["host"] = true,
---~ ["other"] = {
---~ ["mswin"] = false,
---~ ["linux"] = false,
---~ ["linux-64"] = false,
---~ ["osx-intel"] = false,
---~ ["osx-ppc"] = false,
---~ ["sun"] = false,
---~ },
---~ },
---~ ["context"] = {
---~ ["available"] = {"current", "beta", "alpha", "experimental"},
---~ ["selected"] = "current",
---~ },
---~ ["formats"] = {
---~ ["cont-en"] = true,
---~ ["cont-nl"] = true,
---~ ["cont-de"] = false,
---~ ["cont-cz"] = false,
---~ ["cont-fr"] = false,
---~ ["cont-ro"] = false,
---~ },
---~ ["engine"] = {
---~ ["pdftex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["pdftex"] = true,
---~ },
---~ },
---~ ["luatex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ },
---~ },
---~ ["xetex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["xetex"] = false,
---~ },
---~ },
---~ ["metapost"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["mpost"] = true,
---~ ["metafun"] = true,
---~ },
---~ },
---~ },
---~ ["fonts"] = {
---~ },
---~ ["doc"] = {
---~ },
---~ ["modules"] = {
---~ ["f-urwgaramond"] = false,
---~ ["f-urwgothic"] = false,
---~ ["t-bnf"] = false,
---~ ["t-chromato"] = false,
---~ ["t-cmscbf"] = false,
---~ ["t-cmttbf"] = false,
---~ ["t-construction-plan"] = false,
---~ ["t-degrade"] = false,
---~ ["t-french"] = false,
---~ ["t-lettrine"] = false,
---~ ["t-lilypond"] = false,
---~ ["t-mathsets"] = false,
---~ ["t-tikz"] = false,
---~ ["t-typearea"] = false,
---~ ["t-vim"] = false,
---~ },
---~ }
-
---~ states.save("teststate", "update")
---~ states.load("teststate", "update")
-
---~ print(states.get_by_tag("update","rsync.server","unknown"))
---~ states.set_by_tag("update","rsync.server","oeps")
---~ print(states.get_by_tag("update","rsync.server","unknown"))
---~ states.save("teststate", "update")
---~ states.load("teststate", "update")
---~ print(states.get_by_tag("update","rsync.server","unknown"))
+
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['luat-fmt'] = {
+ version = 1.001,
+ comment = "companion to mtxrun",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- helper for mtxrun
+
+function environment.make_format(name)
+ -- change to format path (early as we need expanded paths)
+ local olddir = lfs.currentdir()
+ local path = caches.getwritablepath("formats") or "" -- maybe platform
+ if path ~= "" then
+ lfs.chdir(path)
+ end
+ logs.simple("format path: %s",lfs.currentdir())
+ -- check source file
+ local texsourcename = file.addsuffix(name,"tex")
+ local fulltexsourcename = resolvers.find_file(texsourcename,"tex") or ""
+ if fulltexsourcename == "" then
+ logs.simple("no tex source file with name: %s",texsourcename)
+ lfs.chdir(olddir)
+ return
+ else
+ logs.simple("using tex source file: %s",fulltexsourcename)
+ end
+ local texsourcepath = dir.expand_name(file.dirname(fulltexsourcename)) -- really needed
+ -- check specification
+ local specificationname = file.replacesuffix(fulltexsourcename,"lus")
+ local fullspecificationname = resolvers.find_file(specificationname,"tex") or ""
+ if fullspecificationname == "" then
+ specificationname = file.join(texsourcepath,"context.lus")
+ fullspecificationname = resolvers.find_file(specificationname,"tex") or ""
+ end
+ if fullspecificationname == "" then
+ logs.simple("unknown stub specification: %s",specificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local specificationpath = file.dirname(fullspecificationname)
+ -- load specification
+ local usedluastub = nil
+ local usedlualibs = dofile(fullspecificationname)
+ if type(usedlualibs) == "string" then
+ usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs)
+ elseif type(usedlualibs) == "table" then
+ logs.simple("using stub specification: %s",fullspecificationname)
+ local texbasename = file.basename(name)
+ local luastubname = file.addsuffix(texbasename,"lua")
+ local lucstubname = file.addsuffix(texbasename,"luc")
+ -- pack libraries in stub
+ logs.simple("creating initialization file: %s",luastubname)
+ utils.merger.selfcreate(usedlualibs,specificationpath,luastubname)
+ -- compile stub file (does not save that much as we don't use this stub at startup any more)
+ local strip = resolvers.boolean_variable("LUACSTRIP", true)
+ if utils.lua.compile(luastubname,lucstubname,false,strip) and lfs.isfile(lucstubname) then
+ logs.simple("using compiled initialization file: %s",lucstubname)
+ usedluastub = lucstubname
+ else
+ logs.simple("using uncompiled initialization file: %s",luastubname)
+ usedluastub = luastubname
+ end
+ else
+ logs.simple("invalid stub specification: %s",fullspecificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ -- generate format
+ local q = string.quote
+ local command = string.format("luatex --ini --lua=%s %s %sdump",q(usedluastub),q(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
+ logs.simple("running command: %s\n",command)
+ os.spawn(command)
+ -- remove related mem files
+ local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
+ -- logs.simple("removing related mplib format with pattern '%s'", pattern)
+ local mp = dir.glob(pattern)
+ if mp then
+ for i=1,#mp do
+ local name = mp[i]
+ logs.simple("removing related mplib format %s", file.basename(name))
+ os.remove(name)
+ end
+ end
+ lfs.chdir(olddir)
+end
+
+function environment.run_format(name,data,more)
+ -- hm, rather old code here; we can now use the file.whatever functions
+ if name and name ~= "" then
+ local barename = file.removesuffix(name)
+ local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
+ if fmtname == "" then
+ fmtname = resolvers.find_file(file.addsuffix(barename,"fmt")) or ""
+ end
+ fmtname = resolvers.clean_path(fmtname)
+ if fmtname == "" then
+ logs.simple("no format with name: %s",name)
+ else
+ local barename = file.removesuffix(name) -- expanded name
+ local luaname = file.addsuffix(barename,"luc")
+ if not lfs.isfile(luaname) then
+ luaname = file.addsuffix(barename,"lua")
+ end
+ if not lfs.isfile(luaname) then
+ logs.simple("using format name: %s",fmtname)
+ logs.simple("no luc/lua with name: %s",barename)
+ else
+ local q = string.quote
+ local command = string.format("luatex --fmt=%s --lua=%s %s %s",q(barename),q(luaname),q(data),more ~= "" and q(more) or "")
+ logs.simple("running command: %s",command)
+ os.spawn(command)
+ end
+ end
+ end
+end
end -- of closure
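-- Hypothetical use of the two helpers above from a runner: generate a format
-- from its tex/lus sources and then start a run with it (the format and file
-- names are illustrative).
environment.make_format("cont-en")
environment.run_format("cont-en", "mydocument.tex", "")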
-- end library merge
-own = { } -- not local
+own = { } -- not local, might change
+
+own.libs = { -- order can be made better
-own.libs = { -- todo: check which ones are really needed
'l-string.lua',
'l-lpeg.lua',
'l-table.lua',
@@ -11825,24 +12301,32 @@ own.libs = { -- todo: check which ones are really needed
'l-url.lua',
'l-dir.lua',
'l-boolean.lua',
+ 'l-unicode.lua',
'l-math.lua',
--- 'l-unicode.lua',
--- 'l-tex.lua',
'l-utils.lua',
'l-aux.lua',
--- 'l-xml.lua',
+
+ 'trac-inf.lua',
+ 'trac-set.lua',
'trac-tra.lua',
+ 'trac-log.lua',
+ 'trac-pro.lua',
+ 'luat-env.lua', -- can come before inf (as in mkiv)
+
'lxml-tab.lua',
'lxml-lpt.lua',
--- 'lxml-ent.lua',
+ -- 'lxml-ent.lua',
'lxml-mis.lua',
'lxml-aux.lua',
'lxml-xml.lua',
- 'luat-env.lua',
- 'trac-inf.lua',
- 'trac-log.lua',
- 'data-res.lua',
+
+
+ 'data-ini.lua',
+ 'data-exp.lua',
+ 'data-env.lua',
'data-tmp.lua',
+ 'data-met.lua',
+ 'data-res.lua',
'data-pre.lua',
'data-inp.lua',
'data-out.lua',
@@ -11851,13 +12335,15 @@ own.libs = { -- todo: check which ones are really needed
-- 'data-tex.lua',
-- 'data-bin.lua',
'data-zip.lua',
+ 'data-tre.lua',
'data-crl.lua',
'data-lua.lua',
- 'data-kps.lua', -- so that we can replace kpsewhich
'data-aux.lua', -- updater
- 'data-tmf.lua', -- tree files
- -- needed ?
- 'luat-sta.lua', -- states
+ 'data-tmf.lua',
+ 'data-lst.lua',
+
+ 'luat-sta.lua',
+ 'luat-fmt.lua',
}
-- We need this hack till luatex is fixed.
@@ -11870,36 +12356,61 @@ end
-- End of hack.
-own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua'
+own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
+own.path = string.gsub(string.match(own.name,"^(.+)[\\/].-$") or ".","\\","/")
+
+local ownpath, owntree = own.path, environment and environment.ownpath or own.path
+
+own.list = {
+ '.',
+ ownpath ,
+ ownpath .. "/../sources", -- HH's development path
+ owntree .. "/../../texmf-local/tex/context/base",
+ owntree .. "/../../texmf-context/tex/context/base",
+ owntree .. "/../../texmf-dist/tex/context/base",
+ owntree .. "/../../texmf/tex/context/base",
+ owntree .. "/../../../texmf-local/tex/context/base",
+ owntree .. "/../../../texmf-context/tex/context/base",
+ owntree .. "/../../../texmf-dist/tex/context/base",
+ owntree .. "/../../../texmf/tex/context/base",
+}
+if own.path == "." then table.remove(own.list,1) end
-own.path = string.match(own.name,"^(.+)[\\/].-$") or "."
-own.list = { '.' }
-if own.path ~= '.' then
- table.insert(own.list,own.path)
+local function locate_libs()
+ for l=1,#own.libs do
+ local lib = own.libs[l]
+ for p =1,#own.list do
+ local pth = own.list[p]
+ local filename = pth .. "/" .. lib
+ local found = lfs.isfile(filename)
+ if found then
+ return pth
+ end
+ end
+ end
end
-table.insert(own.list,own.path.."/../../../tex/context/base")
-table.insert(own.list,own.path.."/mtx")
-table.insert(own.list,own.path.."/../sources")
-local function locate_libs()
- for _, lib in pairs(own.libs) do
- for _, pth in pairs(own.list) do
- local filename = string.gsub(pth .. "/" .. lib,"\\","/")
+local function load_libs()
+ local found = locate_libs()
+ if found then
+ for l=1,#own.libs do
+ local filename = found .. "/" .. own.libs[l]
local codeblob = loadfile(filename)
if codeblob then
codeblob()
- own.list = { pth } -- speed up te search
- break
end
end
+ else
+ resolvers = nil
end
end
if not resolvers then
- locate_libs()
+ load_libs()
end
+
if not resolvers then
print("")
print("Mtxrun is unable to start up due to lack of libraries. You may")
@@ -11909,7 +12420,11 @@ if not resolvers then
os.exit()
end
-logs.setprogram('MTXrun',"TDS Runner Tool 1.24",environment.arguments["verbose"] or false)
+logs.setprogram('MTXrun',"TDS Runner Tool 1.26")
+
+if environment.arguments["verbose"] then
+ trackers.enable("resolvers.locating")
+end
local instance = resolvers.reset()
@@ -11937,8 +12452,8 @@ messages.help = [[
--ifchanged=filename only execute when given file has changed (md checksum)
--iftouched=old,new only execute when given file has changed (time stamp)
---make create stubs for (context related) scripts
---remove remove stubs (context related) scripts
+--makestubs create stubs for (context related) scripts
+--removestubs remove stubs (context related) scripts
--stubpath=binpath paths where stubs wil be written
--windows create windows (mswin) stubs
--unix create unix (linux) stubs
@@ -11958,8 +12473,24 @@ messages.help = [[
--forcekpse force using kpse (handy when no mkiv and cache installed but less functionality)
--prefixes show supported prefixes
+
+--generate generate file database
+
+--variables show configuration variables
+--expansions show expanded variables
+--configurations show configuration order
+--expand-braces expand complex variable
+--expand-path expand variable (resolve paths)
+--expand-var expand variable (resolve references)
+--show-path show path expansion of ...
+--var-value report value of variable
+--find-file report file location
+--find-path report path of file
+
+--pattern=str filter variables
]]
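-- A few illustrative invocations of the flags documented above (the file and
-- pattern values are made up):
--
--   mtxrun --generate
--   mtxrun --variables --pattern=texmf
--   mtxrun --configurations
--   mtxrun --find-file somefile.tex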
+
runners.applications = {
["lua"] = "luatex --luaonly",
["luc"] = "luatex --luaonly",
@@ -12012,45 +12543,40 @@ end
function runners.prepare()
local checkname = environment.argument("ifchanged")
- if checkname and checkname ~= "" then
+ local verbose = environment.argument("verbose")
+ if type(checkname) == "string" and checkname ~= "" then
local oldchecksum = file.loadchecksum(checkname)
local newchecksum = file.checksum(checkname)
if oldchecksum == newchecksum then
- logs.simple("file '%s' is unchanged",checkname)
+ if verbose then
+ logs.simple("file '%s' is unchanged",checkname)
+ end
return "skip"
- else
+ elseif verbose then
logs.simple("file '%s' is changed, processing started",checkname)
end
file.savechecksum(checkname)
end
- local oldname, newname = string.split(environment.argument("iftouched") or "", ",")
- if oldname and newname and oldname ~= "" and newname ~= "" then
- if not file.needs_updating(oldname,newname) then
- logs.simple("file '%s' and '%s' have same age",oldname,newname)
- return "skip"
- else
- logs.simple("file '%s' is older than '%s'",oldname,newname)
- end
- end
- local tree = environment.argument('tree') or ""
- if environment.argument('autotree') then
- tree = os.getenv('TEXMFSTART_TREE') or os.getenv('TEXMFSTARTTREE') or tree
- end
- if tree and tree ~= "" then
- resolvers.load_tree(tree)
- end
- local env = environment.argument('environment') or ""
- if env and env ~= "" then
- for _,e in pairs(string.split(env)) do
- -- maybe force suffix when not given
- resolvers.load_tree(e)
+ local touchname = environment.argument("iftouched")
+ if type(touchname) == "string" and touchname ~= "" then
+ local oldname, newname = string.split(touchname, ",")
+ if oldname and newname and oldname ~= "" and newname ~= "" then
+ if not file.needs_updating(oldname,newname) then
+ if verbose then
+ logs.simple("file '%s' and '%s' have same age",oldname,newname)
+ end
+ return "skip"
+ elseif verbose then
+ logs.simple("file '%s' is older than '%s'",oldname,newname)
+ end
end
end
local runpath = environment.argument("path")
- if runpath and not lfs.chdir(runpath) then
+ if type(runpath) == "string" and not lfs.chdir(runpath) then
logs.simple("unable to change to path '%s'",runpath)
return "error"
end
+ runners.prepare = function() end
return "run"
end
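-- The --ifchanged handling above skips the run when a stored checksum still
-- matches the file. A standalone sketch of that idea with the md5 and io
-- helpers used elsewhere in this file (the checksum suffix and the file name
-- are made up):
local function changed(name)
    local old = io.loaddata(name .. ".md5") or ""
    local new = md5.hex(io.loaddata(name) or "")
    if old == new then
        return false -- nothing to do, the caller can skip
    end
    io.savedata(name .. ".md5", new) -- remember the new state for next time
    return true
end
print(changed("somefile.tex"))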
@@ -12165,7 +12691,7 @@ function runners.execute_program(fullname)
return false
end
--- the --usekpse flag will fallback on kpse (hm, we can better update mtx-stubs)
+-- the --usekpse flag will fall back (not by default) on kpse (hm, we can better update mtx-stubs)
local windows_stub = '@echo off\013\010setlocal\013\010set ownpath=%%~dp0%%\013\010texlua "%%ownpath%%mtxrun.lua" --usekpse --execute %s %%*\013\010endlocal\013\010'
local unix_stub = '#!/bin/sh\010mtxrun --usekpse --execute %s \"$@\"\010'
@@ -12288,7 +12814,7 @@ end
function runners.launch_file(filename)
instance.allresults = true
- logs.setverbose(true)
+ trackers.enable("resolvers.locating")
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -12368,7 +12894,19 @@ function runners.find_mtx_script(filename)
return fullname
end
-function runners.execute_ctx_script(filename)
+function runners.register_arguments(...)
+ local arguments = environment.arguments_after
+ local passedon = { ... }
+ for i=#passedon,1,-1 do
+ local pi = passedon[i]
+ if pi then
+ table.insert(arguments,1,pi)
+ end
+ end
+end
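-- What register_arguments amounts to, in isolation: arguments passed on by a
-- caller end up in front of the remaining command line arguments, in their
-- original order. A tiny illustration with plain tables (values are made up):
local arguments_after = { "--flag", "somefile.tex" }
local passedon = { "mtx-base", "--variables" }
for i = #passedon, 1, -1 do
    table.insert(arguments_after, 1, passedon[i])
end
print(table.concat(arguments_after, " "))
-- mtx-base --variables --flag somefile.tex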
+
+function runners.execute_ctx_script(filename,...)
+ runners.register_arguments(...)
local arguments = environment.arguments_after
local fullname = runners.find_mtx_script(filename) or ""
if file.extname(fullname) == "cld" then
@@ -12381,7 +12919,7 @@ function runners.execute_ctx_script(filename)
-- retry after generate but only if --autogenerate
if fullname == "" and environment.argument("autogenerate") then -- might become the default
instance.renewcache = true
- logs.setverbose(true)
+ trackers.enable("resolvers.locating")
resolvers.load()
--
fullname = runners.find_mtx_script(filename) or ""
@@ -12421,10 +12959,9 @@ function runners.execute_ctx_script(filename)
return true
end
else
- -- logs.setverbose(true)
if filename == "" or filename == "help" then
local context = resolvers.find_file("mtx-context.lua")
- logs.setverbose(true)
+ trackers.enable("resolvers.locating")
if context ~= "" then
local result = dir.glob((string.gsub(context,"mtx%-context","mtx-*"))) -- () needed
local valid = { }
@@ -12558,80 +13095,317 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
end
+ function runners.loadbase()
+ end
+
else
- resolvers.load()
+ function runners.loadbase(...)
+ if not resolvers.load(...) then
+ logs.simple("forcing cache reload")
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ if not resolvers.load(...) then
+ logs.simple("the resolver databases are not present or outdated")
+ end
+ end
+ end
end
+resolvers.load_tree(environment.argument('tree'))
+
if environment.argument("selfmerge") then
+
-- embed used libraries
- utils.merger.selfmerge(own.name,own.libs,own.list)
+
+ runners.loadbase()
+ local found = locate_libs()
+ if found then
+ utils.merger.selfmerge(own.name,own.libs,{ found })
+ end
+
elseif environment.argument("selfclean") then
+
-- remove embedded libraries
+
+ runners.loadbase()
utils.merger.selfclean(own.name)
+
elseif environment.argument("selfupdate") then
- logs.setverbose(true)
+
+ runners.loadbase()
+ trackers.enable("resolvers.locating")
resolvers.update_script(own.name,"mtxrun")
+
elseif environment.argument("ctxlua") or environment.argument("internal") then
+
-- run a script by loading it (using libs)
+
+ runners.loadbase()
ok = runners.execute_script(filename,true)
+
elseif environment.argument("script") or environment.argument("scripts") then
+
-- run a script by loading it (using libs), pass args
+
+ runners.loadbase()
if is_mkii_stub then
- -- execute mkii script
ok = runners.execute_script(filename,false,true)
else
ok = runners.execute_ctx_script(filename)
end
+
elseif environment.argument("execute") then
+
-- execute script
+
+ runners.loadbase()
ok = runners.execute_script(filename)
+
elseif environment.argument("direct") then
+
-- equals bin:
+
+ runners.loadbase()
ok = runners.execute_program(filename)
+
elseif environment.argument("edit") then
+
-- edit file
+
+ runners.loadbase()
runners.edit_script(filename)
+
elseif environment.argument("launch") then
+
+ runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("make") then
- -- make stubs
+
+elseif environment.argument("makestubs") then
+
+ -- make stubs (depricated)
+
runners.handle_stubs(true)
-elseif environment.argument("remove") then
- -- remove stub
+
+elseif environment.argument("removestubs") then
+
+ -- remove stub (depricated)
+
+ runners.loadbase()
runners.handle_stubs(false)
+
elseif environment.argument("resolve") then
+
-- resolve string
+
+ runners.loadbase()
runners.resolve_string(filename)
+
elseif environment.argument("locate") then
+
-- locate file
+
+ runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform")then
+
+elseif environment.argument("platform") or environment.argument("show-platform") then
+
-- locate platform
+
+ runners.loadbase()
runners.locate_platform()
+
elseif environment.argument("prefixes") then
+
+ runners.loadbase()
runners.prefixes()
+
elseif environment.argument("timedrun") then
+
-- locate platform
+
+ runners.loadbase()
runners.timedrun(filename)
+
+elseif environment.argument("variables") or environment.argument("show-variables") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--variables",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.variables(false,environment.argument("pattern"))
+
+elseif environment.argument("expansions") or environment.argument("show-expansions") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.expansions(false,environment.argument("pattern"))
+
+elseif environment.argument("configurations") or environment.argument("show-configurations") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.configurations(false,environment.argument("pattern"))
+
+elseif environment.argument("find-file") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
+
+ resolvers.load()
+ local pattern = environment.argument("pattern")
+ local format = environment.arguments["format"] or instance.format
+ if not pattern then
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.find_files,environment.files,format)
+ elseif type(pattern) == "string" then
+ instance.allresults = true -- brrrr
+ resolvers.for_files(resolvers.find_files,{ pattern }, format)
+ end
+
+elseif environment.argument("find-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
+
+ resolvers.load()
+ local path = resolvers.find_path(filename, instance.my_format)
+ if logs.verbose then
+ logs.simple(path)
+ else
+ print(path)
+ end
+
+elseif environment.argument("expand-braces") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.expand_braces, environment.files)
+
+elseif environment.argument("expand-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.expand_path, environment.files)
+
+elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.expand_var, environment.files)
+
+elseif environment.argument("show-path") or environment.argument("path-value") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.show_path, environment.files)
+
+elseif environment.argument("var-value") or environment.argument("show-value") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.var_value,environment.files)
+
+elseif environment.argument("format-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
+
+ resolvers.load()
+ logs.simple(caches.getwritablepath("format"))
+
+elseif environment.argument("pattern") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+
+elseif environment.argument("generate") then
+
+ -- luatools
+
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+
+elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
+
+ resolvers.load()
+ trackers.enable("resolvers.locating")
+ environment.make_format(filename)
+
+elseif environment.argument("run") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--run",filename)
+
+elseif environment.argument("fmt") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--fmt",filename)
+
+elseif environment.argument("help") and filename=='base' then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--help")
+
elseif environment.argument("help") or filename=='help' or filename == "" then
+
logs.help(messages.help)
- -- execute script
+
elseif filename:find("^bin:") then
+
+ runners.loadbase()
ok = runners.execute_program(filename)
+
elseif is_mkii_stub then
+
-- execute mkii script
+
+ runners.loadbase()
ok = runners.execute_script(filename,false,true)
-else
+
+elseif false then
+
+ runners.loadbase()
ok = runners.execute_ctx_script(filename)
if not ok then
ok = runners.execute_script(filename)
end
+
+else
+
+ runners.execute_ctx_script("mtx-base",filename)
+
+end
+
+if logs.verbose then
+ logs.simpleline()
+ logs.simple("runtime: %0.3f seconds",os.runtime())
end
-if os.platform == "unix" then
- io.write("\n")
+if os.type ~= "windows" then
+ texio.write("\n")
end
if ok == false then ok = 1 elseif ok == true then ok = 0 end
diff --git a/scripts/context/stubs/mswin/setuptex.bat b/scripts/context/stubs/mswin/setuptex.bat
new file mode 100644
index 000000000..52c60f155
--- /dev/null
+++ b/scripts/context/stubs/mswin/setuptex.bat
@@ -0,0 +1,34 @@
+@ECHO OFF
+
+REM author: Hans Hagen - PRAGMA ADE - Hasselt NL - www.pragma-ade.com
+
+:userpath
+
+if "%SETUPTEX%"=="done" goto done
+
+if "%~s1"=="" goto selftest
+
+set TEXMFOS=%~s1texmf-mswin
+if exist %TEXMFOS%\bin\mtxrun.exe goto start
+
+set TEXMFOS=%~s1\texmf-mswin
+if exist %TEXMFOS%\bin\mtxrun.exe goto start
+
+:selftest
+
+set TEXMFOS=%~d0%~p0texmf-mswin
+if exist %TEXMFOS%\bin\mtxrun.exe goto start
+
+set TEXMFOS=%~d0%~p0\texmf-mswin
+if exist %TEXMFOS%\bin\mtxrun.exe goto start
+
+:start
+
+set PATH=%TEXMFOS%\bin;%PATH%
+
+:register
+
+set SETUPTEX=done
+set CTXMINIMAL=yes
+
+:done
diff --git a/scripts/context/stubs/source/mtxrun_dll.c b/scripts/context/stubs/source/mtxrun_dll.c
index 5b7cd31a0..400ed6778 100644
--- a/scripts/context/stubs/source/mtxrun_dll.c
+++ b/scripts/context/stubs/source/mtxrun_dll.c
@@ -5,8 +5,8 @@
Public Domain
Originally written in 2010 by Tomasz M. Trzeciak and Hans Hagen
- This program is derived from the 'runscript' program originally
- written in 2009 by T.M. Trzeciak. It has been adapted for use in
+ This program is derived from the 'runscript' program originally
+ written in 2009 by T.M. Trzeciak. It has been adapted for use in
ConTeXt MkIV.
Comment:
@@ -18,26 +18,26 @@
mtxrun --script font --reload
Here mtxrun is a lua script. In order to avoid the usage of a cmd
- file on windows this runner will start texlua directly. If the
- shared library luatex.dll is available, texlua will be started in
- the same process avoiding thus any additional overhead. Otherwise
+ file on windows this runner will start texlua directly. If the
+ shared library luatex.dll is available, texlua will be started in
+ the same process, thus avoiding any additional overhead. Otherwise
    it will be spawned in a new process.
We also don't want to use other runners, like those that use kpse
to locate the script as this is exactly what mtxrun itself is doing
already. Therefore the runscript program is adapted to a more direct
approach suitable for mtxrun.
-
+
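+    For example, "mtxrun --script font --reload" started through this stub
+    boils down, apart from quoting and the full path, to
+
+       texlua mtxrun.lua --script font --reload
+
+    while a "luatools" call is mapped onto "mtxrun.lua --script base".
+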
Compilation:
with gcc (size optimized):
- gcc -Os -s -shared -o mtxrun.dll mtxrun_dll.c
+ gcc -Os -s -shared -o mtxrun.dll mtxrun_dll.c
gcc -Os -s -o mtxrun.exe mtxrun_exe.c -L./ -lmtxrun
with tcc (extra small size):
-
- tcc -shared -o mtxrun.dll mtxrun_dll.c
+
+ tcc -shared -o mtxrun.dll mtxrun_dll.c
tcc -o mtxrun.exe mtxrun_exe.c mtxrun.def
************************************************************************/
@@ -65,9 +65,9 @@ HMODULE dllluatex = NULL;
typedef int ( *mainlikeproc )( int, char ** );
#ifdef STATIC
-int main( int argc, char *argv[] )
+int main( int argc, char *argv[] )
#else
-__declspec(dllexport) int dllrunscript( int argc, char *argv[] )
+__declspec(dllexport) int dllrunscript( int argc, char *argv[] )
#endif
{
char *s, *luatexfname, *argstr, **lua_argv;
@@ -75,51 +75,68 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
int passprogname = 0;
// directory of this module/executable
-
- HMODULE module_handle = GetModuleHandle( "mtxrun.dll" );
+
+ HMODULE module_handle = GetModuleHandle( "mtxrun.dll" );
// if ( module_handle == NULL ) exe path will be used, which is OK too
k = (int) GetModuleFileName( module_handle, dirpath, MAX_PATH );
- if ( !k || ( k == MAX_PATH ) )
+ if ( !k || ( k == MAX_PATH ) )
DIE( "unable to determine a valid module name\n" );
s = strrchr(dirpath, '\\');
if ( s == NULL ) DIE( "no directory part in module path: %s\n", dirpath );
*(++s) = '\0'; //remove file name, leave trailing backslash
-
+
// program name
-
+
k = strlen(argv[0]);
while ( k && (argv[0][k-1] != '/') && (argv[0][k-1] != '\\') ) k--;
strcpy(progname, &argv[0][k]);
s = progname;
if ( s = strrchr(s, '.') ) *s = '\0'; // remove file extension part
-
+
// script path
-
+
strcpy( scriptpath, dirpath );
k = strlen(progname);
if ( k < 6 ) k = 6; // in case the program name is shorter than "mtxrun"
- if ( strlen(dirpath) + k + 4 >= MAX_PATH )
+ if ( strlen(dirpath) + k + 4 >= MAX_PATH )
DIE( "path too long: %s%s\n", dirpath, progname );
- if ( ( strcmpi(progname,"mtxrun") == 0 ) || ( strcmpi(progname,"luatools") == 0 ) ) {
+
+ if ( strcmpi(progname,"mtxrun") == 0 ) {
strcat( scriptpath, progname );
strcat( scriptpath, ".lua" );
+ } else if ( strcmpi(progname,"luatools") == 0 ) {
+ strcat( scriptpath, "mtxrun.lua" );
+ strcpy( progname, "base" );
+ passprogname = 1;
+ } else if ( strcmpi(progname,"texmfstart") == 0 ) {
+ strcat( scriptpath, "mtxrun.lua" );
} else {
strcat( scriptpath, "mtxrun.lua" );
- if ( strcmpi(progname,"texmfstart") != 0 ) passprogname = 1;
+ passprogname = 1;
}
- if ( GetFileAttributes(scriptpath) == INVALID_FILE_ATTRIBUTES )
+
+ if ( GetFileAttributes(scriptpath) == INVALID_FILE_ATTRIBUTES )
DIE( "file not found: %s\n", scriptpath );
-
+
// find texlua.exe
-
- if ( !SearchPath(
- getenv( "PATH" ), // path to search (optional)
- "texlua.exe", // file name to search
- NULL, // file extension to add (optional)
- MAX_PATH, // output buffer size
- luatexpath, // output buffer pointer
- &luatexfname ) // pointer to a file part in the output buffer (optional)
- ) DIE( "unable to locate texlua.exe on the search path" );
+
+ if ( !SearchPath(
+ getenv( "PATH" ), // path to search (optional)
+ "texlua.exe", // file name to search
+ NULL, // file extension to add (optional)
+ MAX_PATH, // output buffer size
+ luatexpath, // output buffer pointer
+ &luatexfname ) // pointer to a file part in the output buffer (optional)
+ )
+ if ( !SearchPath(
+ dirpath, // path to search (optional)
+ "texlua.exe", // file name to search
+ NULL, // file extension to add (optional)
+ MAX_PATH, // output buffer size
+ luatexpath, // output buffer pointer
+ &luatexfname ) // pointer to a file part in the output buffer (optional)
+ )
+ DIE( "unable to locate texlua.exe on the search path" );
// link directly with luatex.dll if available in texlua's dir
@@ -127,11 +144,11 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
if ( dllluatex = LoadLibrary(luatexpath) )
{
mainlikeproc dllluatexmain = (mainlikeproc) GetProcAddress( dllluatex, "dllluatexmain" );
- if ( dllluatexmain == NULL )
+ if ( dllluatexmain == NULL )
DIE( "unable to locate dllluatexmain procedure in luatex.dll" );
-
+
// set up argument list for texlua script
-
+
lua_argv = (char **)malloc( (argc + 4) * sizeof(char *) );
if ( lua_argv == NULL ) DIE( "out of memory\n" );
lua_argv[lua_argc=0] = texlua_name;
@@ -139,18 +156,18 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
if (passprogname) {
lua_argv[++lua_argc] = "--script";
lua_argv[++lua_argc] = progname;
- }
+ }
for ( k = 1; k < argc; k++ ) lua_argv[++lua_argc] = argv[k];
lua_argv[++lua_argc] = NULL;
// call texlua interpreter
// dllluatexmain never returns, but we pretend that it does
-
+
k = dllluatexmain( lua_argc, lua_argv );
if (lua_argv) free( lua_argv );
return k;
}
-
+
// we are still here, so no luatex.dll; spawn texlua.exe instead
strcpy( luatexfname, "texlua.exe" );
@@ -163,24 +180,24 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
strcat( cmdline, " --script " );
strcat( cmdline, progname );
}
-
+
argstr = GetCommandLine(); // get the command line of this process
if ( argstr == NULL ) DIE( "unable to retrieve the command line string\n" );
// skip over argv[0] in the argument string
// (it can contain embedded double quotes if launched from cmd.exe!)
-
- for ( quoted = 0; (*argstr) && ( !IS_WHITESPACE(*argstr) || quoted ); argstr++ )
+
+ for ( quoted = 0; (*argstr) && ( !IS_WHITESPACE(*argstr) || quoted ); argstr++ )
if (*argstr == '"') quoted = !quoted;
-
+
// pass through all the arguments
-
- if ( strlen(cmdline) + strlen(argstr) >= MAX_CMD )
+
+ if ( strlen(cmdline) + strlen(argstr) >= MAX_CMD )
DIE( "command line string too long:\n%s%s\n", cmdline, argstr );
- strcat( cmdline, argstr );
-
+ strcat( cmdline, argstr );
+
// create child process
-
+
STARTUPINFO si;
PROCESS_INFORMATION pi;
ZeroMemory( &si, sizeof(si) );
@@ -192,7 +209,7 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
si.hStdOutput = GetStdHandle( STD_OUTPUT_HANDLE );
si.hStdError = GetStdHandle( STD_ERROR_HANDLE );
ZeroMemory( &pi, sizeof(pi) );
-
+
if( !CreateProcess(
NULL, // module name (uses command line if NULL)
cmdline, // command line
@@ -205,17 +222,17 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
&si, // STARTUPINFO structure
&pi ) // PROCESS_INFORMATION structure
) DIE( "command execution failed: %s\n", cmdline );
-
+
DWORD ret = 0;
CloseHandle( pi.hThread ); // thread handle is not needed
if ( WaitForSingleObject( pi.hProcess, INFINITE ) == WAIT_OBJECT_0 ) {
- if ( !GetExitCodeProcess( pi.hProcess, &ret) )
+ if ( !GetExitCodeProcess( pi.hProcess, &ret) )
DIE( "unable to retrieve process exit code: %s\n", cmdline );
} else DIE( "failed to wait for process termination: %s\n", cmdline );
CloseHandle( pi.hProcess );
-
+
// propagate exit code from the child process
-
- return ret;
-
+
+ return ret;
+
}
diff --git a/scripts/context/stubs/unix/luatools b/scripts/context/stubs/unix/luatools
index 1d87322c1..c17b483be 100644
--- a/scripts/context/stubs/unix/luatools
+++ b/scripts/context/stubs/unix/luatools
@@ -1,8185 +1,2 @@
-#!/usr/bin/env texlua
-
-if not modules then modules = { } end modules ['luatools'] = {
- version = 1.001,
- comment = "companion to context.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-
--- one can make a stub:
---
--- #!/bin/sh
--- env LUATEXDIR=/....../texmf/scripts/context/lua texlua luatools.lua "$@"
-
--- Although this script is part of the ConTeXt distribution it is
--- relatively independent of ConTeXt. The same is true for some of
--- the luat files. We may make them even less dependent in
--- the future. As long as Luatex is under development the
--- interfaces and names of functions may change.
-
--- For the sake of independence we can optionally merge the library
--- code here. It's a lot of code, but that does no harm. Much of the
--- library code is used elsewhere. We don't want dependencies on
--- Lua library paths simply because these scripts are located in the
--- texmf tree and not in some Lua path. Normally this merge is not
--- needed when texmfstart is used, or when the proper stub is used or
--- when (windows) suffix binding is active.
-
-texlua = true
-
--- begin library merge
-
-
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-string'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
-local lpegmatch = lpeg.match
-
--- some functions may disappear as they are not used anywhere
-
-if not string.split then
-
- -- this will be overloaded by a faster lpeg variant
-
- function string:split(pattern)
- if #self > 0 then
- local t = { }
- for s in gmatch(self..pattern,"(.-)"..pattern) do
- t[#t+1] = s
- end
- return t
- else
- return { }
- end
- end
-
-end
-
-local chr_to_esc = {
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["^"] = "%^", ["$"] = "%$",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%(", [")"] = "%)",
- ["{"] = "%{", ["}"] = "%}"
-}
-
-string.chr_to_esc = chr_to_esc
-
-function string:esc() -- variant 2
- return (gsub(self,"(.)",chr_to_esc))
-end
-
-function string:unquote()
- return (gsub(self,"^([\"\'])(.*)%1$","%2"))
-end
-
---~ function string:unquote()
---~ if find(self,"^[\'\"]") then
---~ return sub(self,2,-2)
---~ else
---~ return self
---~ end
---~ end
-
-function string:quote() -- we could use format("%q")
- return format("%q",self)
-end
-
-function string:count(pattern) -- variant 3
- local n = 0
- for _ in gmatch(self,pattern) do
- n = n + 1
- end
- return n
-end
-
-function string:limit(n,sentinel)
- if #self > n then
- sentinel = sentinel or " ..."
- return sub(self,1,(n-#sentinel)) .. sentinel
- else
- return self
- end
-end
-
---~ function string:strip() -- the .- is quite efficient
---~ -- return match(self,"^%s*(.-)%s*$") or ""
---~ -- return match(self,'^%s*(.*%S)') or '' -- posted on lua list
---~ return find(s,'^%s*$') and '' or match(s,'^%s*(.*%S)')
---~ end
-
-do -- roberto's variant:
- local space = lpeg.S(" \t\v\n")
- local nospace = 1 - space
- local stripper = space^0 * lpeg.C((space^0 * nospace^1)^0)
- function string.strip(str)
- return lpegmatch(stripper,str) or ""
- end
-end
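-
---~ the stripper keeps inner spacing but drops both edges, for instance:
---~
---~ print("[" .. string.strip("  keep  inner  spaces  ") .. "]") -- [keep  inner  spaces]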
-
-function string:is_empty()
- return not find(self,"%S")
-end
-
-function string:enhance(pattern,action)
- local ok, n = true, 0
- while ok do
- ok = false
- self = gsub(self,pattern, function(...)
- ok, n = true, n + 1
- return action(...)
- end)
- end
- return self, n
-end
-
-local chr_to_hex, hex_to_chr = { }, { }
-
-for i=0,255 do
- local c, h = char(i), format("%02X",i)
- chr_to_hex[c], hex_to_chr[h] = h, c
-end
-
-function string:to_hex()
- return (gsub(self or "","(.)",chr_to_hex))
-end
-
-function string:from_hex()
- return (gsub(self or "","(..)",hex_to_chr))
-end
-
-if not string.characters then
-
- local function nextchar(str, index)
- index = index + 1
- return (index <= #str) and index or nil, sub(str,index,index)
- end
- function string:characters()
- return nextchar, self, 0
- end
- local function nextbyte(str, index)
- index = index + 1
- return (index <= #str) and index or nil, byte(sub(str,index,index))
- end
- function string:bytes()
- return nextbyte, self, 0
- end
-
-end
-
--- we can use format for this (neg n)
-
-function string:rpadd(n,chr)
- local m = n-#self
- if m > 0 then
- return self .. rep(chr or " ",m)
- else
- return self
- end
-end
-
-function string:lpadd(n,chr)
- local m = n-#self
- if m > 0 then
- return rep(chr or " ",m) .. self
- else
- return self
- end
-end
-
-string.padd = string.rpadd
-
-function is_number(str) -- tonumber
- return find(str,"^[%-%+]?[%d]-%.?[%d+]$") == 1
-end
-
---~ print(is_number("1"))
---~ print(is_number("1.1"))
---~ print(is_number(".1"))
---~ print(is_number("-0.1"))
---~ print(is_number("+0.1"))
---~ print(is_number("-.1"))
---~ print(is_number("+.1"))
-
-function string:split_settings() -- no {} handling, see l-aux for lpeg variant
- if find(self,"=") then
- local t = { }
- for k,v in gmatch(self,"(%a+)=([^%,]*)") do
- t[k] = v
- end
- return t
- else
- return nil
- end
-end
-
-local patterns_escapes = {
- ["-"] = "%-",
- ["."] = "%.",
- ["+"] = "%+",
- ["*"] = "%*",
- ["%"] = "%%",
- ["("] = "%)",
- [")"] = "%)",
- ["["] = "%[",
- ["]"] = "%]",
-}
-
-function string:pattesc()
- return (gsub(self,".",patterns_escapes))
-end
-
-local simple_escapes = {
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-function string:simpleesc()
- return (gsub(self,".",simple_escapes))
-end
-
-function string:tohash()
- local t = { }
- for s in gmatch(self,"([^, ]+)") do -- lpeg
- t[s] = true
- end
- return t
-end
-
-local pattern = lpeg.Ct(lpeg.C(1)^0)
-
-function string:totable()
- return lpegmatch(pattern,self)
-end
-
---~ local t = {
---~ "1234567123456712345671234567",
---~ "a\tb\tc",
---~ "aa\tbb\tcc",
---~ "aaa\tbbb\tccc",
---~ "aaaa\tbbbb\tcccc",
---~ "aaaaa\tbbbbb\tccccc",
---~ "aaaaaa\tbbbbbb\tcccccc",
---~ }
---~ for k,v do
---~ print(string.tabtospace(t[k]))
---~ end
-
-function string.tabtospace(str,tab)
- -- we don't handle embedded newlines
- while true do
- local s = find(str,"\t")
- if s then
- if not tab then tab = 7 end -- only when found
- local d = tab-(s-1) % tab
- if d > 0 then
- str = gsub(str,"\t",rep(" ",d),1)
- else
- str = gsub(str,"\t","",1)
- end
- else
- break
- end
- end
- return str
-end
-
-function string:compactlong() -- strips newlines and leading spaces
- self = gsub(self,"[\n\r]+ *","")
- self = gsub(self,"^ *","")
- return self
-end
-
-function string:striplong() -- strips newlines and leading spaces
- self = gsub(self,"^%s*","")
- self = gsub(self,"[\n\r]+ *","\n")
- return self
-end
-
-function string:topattern(lowercase,strict)
- if lowercase then
- self = lower(self)
- end
- self = gsub(self,".",simple_escapes)
- if self == "" then
- self = ".*"
- elseif strict then
- self = "^" .. self .. "$"
- end
- return self
-end
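-
---~ simple wildcards become lua patterns, for instance:
---~
---~ print(string.topattern("*.mkiv"))            -- .*%.mkiv
---~ print(string.topattern("*.mkiv",false,true)) -- ^.*%.mkiv$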
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, Ct, C, Cs, Cc, V = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.V
-local match = lpeg.match
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local utf8byte = R("\128\191")
-
-patterns.utf8byte = utf8byte
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8byte
-patterns.utf8three = R("\224\239") * utf8byte * utf8byte
-patterns.utf8four = R("\240\244") * utf8byte * utf8byte * utf8byte
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = S(" ")
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = crlf + cr + lf
-patterns.nonspace = 1 - patterns.space
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191')
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local spacing = patterns.spacer^0 * patterns.newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-local capture = Ct(content^0)
-
-function string:splitlines()
- return match(capture,self)
-end
-
-patterns.textline = content
-
---~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more
---~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more
---~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps
---~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- if single then
- local other, any = C((1 - separator)^0), P(1)
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- local other = C((1 - separator)^0)
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string:split(separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,self)
-end
-
-lpeg.splitters = cache
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^0)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string:checkedsplit(separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^0)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,self)
-end
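-
---~ the difference between the two splitters, for instance:
---~
---~ print(#lpeg.split(",",",a,,b"))        -- 4, empty strings are kept
---~ print(#lpeg.checkedsplit(",",",a,,b")) -- 2, only "a" and "b" survive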
-
---~ function lpeg.append(list,pp)
---~ local p = pp
---~ for l=1,#list do
---~ if p then
---~ p = p + P(list[l])
---~ else
---~ p = P(list[l])
---~ end
---~ end
---~ return p
---~ end
-
---~ from roberto's site:
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-table'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-table.join = table.concat
-
-local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
-local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
-local getmetatable, setmetatable = getmetatable, setmetatable
-local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs
-local unpack = unpack or table.unpack
-
-function table.strip(tab)
- local lst = { }
- for i=1,#tab do
- local s = gsub(tab[i],"^%s*(.-)%s*$","%1")
- if s == "" then
- -- skip this one
- else
- lst[#lst+1] = s
- end
- end
- return lst
-end
-
-function table.keys(t)
- local k = { }
- for key, _ in next, t do
- k[#k+1] = key
- end
- return k
-end
-
-local function compare(a,b)
- return (tostring(a) < tostring(b))
-end
-
-local function sortedkeys(tab)
- local srt, kind = { }, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
- srt[#srt+1] = key
- if kind == 3 then
- -- no further check
- else
- local tkey = type(key)
- if tkey == "string" then
- -- if kind == 2 then kind = 3 else kind = 1 end
- kind = (kind == 2 and 3) or 1
- elseif tkey == "number" then
- -- if kind == 1 then kind = 3 else kind = 2 end
- kind = (kind == 1 and 3) or 2
- else
- kind = 3
- end
- end
- end
- if kind == 0 or kind == 3 then
- sort(srt,compare)
- else
- sort(srt)
- end
- return srt
-end
-
-local function sortedhashkeys(tab) -- fast one
- local srt = { }
- for key,_ in next, tab do
- srt[#srt+1] = key
- end
- sort(srt)
- return srt
-end
-
-table.sortedkeys = sortedkeys
-table.sortedhashkeys = sortedhashkeys
-
-function table.sortedhash(t)
- local s = sortedhashkeys(t) -- maybe just sortedkeys
- local n = 0
- local function kv(s)
- n = n + 1
- local k = s[n]
- return k, t[k]
- end
- return kv, s
-end
-
-table.sortedpairs = table.sortedhash
-
-function table.append(t, list)
- for _,v in next, list do
- insert(t,v)
- end
-end
-
-function table.prepend(t, list)
- for k,v in next, list do
- insert(t,k,v)
- end
-end
-
-function table.merge(t, ...) -- first one is target
- t = t or {}
- local lst = {...}
- for i=1,#lst do
- for k, v in next, lst[i] do
- t[k] = v
- end
- end
- return t
-end
-
-function table.merged(...)
- local tmp, lst = { }, {...}
- for i=1,#lst do
- for k, v in next, lst[i] do
- tmp[k] = v
- end
- end
- return tmp
-end
-
-function table.imerge(t, ...)
- local lst = {...}
- for i=1,#lst do
- local nst = lst[i]
- for j=1,#nst do
- t[#t+1] = nst[j]
- end
- end
- return t
-end
-
-function table.imerged(...)
- local tmp, lst = { }, {...}
- for i=1,#lst do
- local nst = lst[i]
- for j=1,#nst do
- tmp[#tmp+1] = nst[j]
- end
- end
- return tmp
-end
-
-local function fastcopy(old) -- fast one
- if old then
- local new = { }
- for k,v in next, old do
- if type(v) == "table" then
- new[k] = fastcopy(v) -- was just table.copy
- else
- new[k] = v
- end
- end
- -- optional second arg
- local mt = getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- return new
- else
- return { }
- end
-end
-
-local function copy(t, tables) -- taken from lua wiki, slightly adapted
- tables = tables or { }
- local tcopy = {}
- if not tables[t] then
- tables[t] = tcopy
- end
- for i,v in next, t do -- brrr, what happens with sparse indexed
- if type(i) == "table" then
- if tables[i] then
- i = tables[i]
- else
- i = copy(i, tables)
- end
- end
- if type(v) ~= "table" then
- tcopy[i] = v
- elseif tables[v] then
- tcopy[i] = tables[v]
- else
- tcopy[i] = copy(v, tables)
- end
- end
- local mt = getmetatable(t)
- if mt then
- setmetatable(tcopy,mt)
- end
- return tcopy
-end
-
-table.fastcopy = fastcopy
-table.copy = copy
-
--- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
-
-function table.sub(t,i,j)
- return { unpack(t,i,j) }
-end
-
-function table.replace(a,b)
- for k,v in next, b do
- a[k] = v
- end
-end
-
--- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
-
-function table.is_empty(t) -- obsolete, use inline code instead
- return not t or not next(t)
-end
-
-function table.one_entry(t) -- obsolete, use inline code instead
- local n = next(t)
- return n and not next(t,n)
-end
-
---~ function table.starts_at(t) -- obsolete, not nice
---~ return ipairs(t,1)(t,0)
---~ end
-
-function table.tohash(t,value)
- local h = { }
- if t then
- if value == nil then value = true end
- for _, v in next, t do -- no ipairs here
- h[v] = value
- end
- end
- return h
-end
-
-function table.fromhash(t)
- local h = { }
- for k, v in next, t do -- no ipairs here
- if v then h[#h+1] = k end
- end
- return h
-end
-
---~ print(table.serialize(t), "\n")
---~ print(table.serialize(t,"name"), "\n")
---~ print(table.serialize(t,false), "\n")
---~ print(table.serialize(t,true), "\n")
---~ print(table.serialize(t,"name",true), "\n")
---~ print(table.serialize(t,"name",true,true), "\n")
-
-table.serialize_functions = true
-table.serialize_compact = true
-table.serialize_inline = true
-
-local noquotes, hexify, handle, reduce, compact, inline, functions
-
-local reserved = table.tohash { -- intercept a language flaw, no reserved words as key
- 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if',
- 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while',
-}
-
-local function simple_table(t)
- if #t > 0 then
- local n = 0
- for _,v in next, t do
- n = n + 1
- end
- if n == #t then
- local tt = { }
- for i=1,#t do
- local v = t[i]
- local tv = type(v)
- if tv == "number" then
- if hexify then
- tt[#tt+1] = format("0x%04X",v)
- else
- tt[#tt+1] = tostring(v) -- tostring not needed
- end
- elseif tv == "boolean" then
- tt[#tt+1] = tostring(v)
- elseif tv == "string" then
- tt[#tt+1] = format("%q",v)
- else
- tt = nil
- break
- end
- end
- return tt
- end
- end
- return nil
-end
-
--- Because this is a core function of mkiv I moved some function calls
--- inline.
---
--- twice as fast in a test:
---
--- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) )
-
--- problem: there is no good number_to_string converter with the best resolution
-
-local function do_serialize(root,name,depth,level,indexed)
- if level > 0 then
- depth = depth .. " "
- if indexed then
- handle(format("%s{",depth))
- elseif name then
- --~ handle(format("%s%s={",depth,key(name)))
- if type(name) == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s[0x%04X]={",depth,name))
- else
- handle(format("%s[%s]={",depth,name))
- end
- elseif noquotes and not reserved[name] and find(name,"^%a[%w%_]*$") then
- handle(format("%s%s={",depth,name))
- else
- handle(format("%s[%q]={",depth,name))
- end
- else
- handle(format("%s{",depth))
- end
- end
- -- we could check for k (index) being number (cardinal)
- if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
- if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
- end
- end
- local sk = sortedkeys(root)
- for i=1,#sk do
- local k = sk[i]
- local v = root[k]
- --~ if v == root then
- -- circular
- --~ else
- local t = type(v)
- if compact and first and type(k) == "number" and k >= first and k <= last then
- if t == "number" then
- if hexify then
- handle(format("%s 0x%04X,",depth,v))
- else
- handle(format("%s %s,",depth,v)) -- %.99g
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
- elseif t == "table" then
- if not next(v) then
- handle(format("%s {},",depth))
- elseif inline then -- and #t > 0
- local st = simple_table(v)
- if st then
- handle(format("%s { %s },",depth,concat(st,", ")))
- else
- do_serialize(v,k,depth,level+1,true)
- end
- else
- do_serialize(v,k,depth,level+1,true)
- end
- elseif t == "boolean" then
- handle(format("%s %s,",depth,tostring(v)))
- elseif t == "function" then
- if functions then
- handle(format('%s loadstring(%q),',depth,dump(v)))
- else
- handle(format('%s "function",',depth))
- end
- else
- handle(format("%s %q,",depth,tostring(v)))
- end
- elseif k == "__p__" then -- parent
- if false then
- handle(format("%s __p__=nil,",depth))
- end
- elseif t == "number" then
- --~ if hexify then
- --~ handle(format("%s %s=0x%04X,",depth,key(k),v))
- --~ else
- --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g
- --~ end
- if type(k) == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
- end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
- else
- handle(format("%s %s=%s,",depth,k,v)) -- %.99g
- end
- else
- if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
- end
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- --~ handle(format("%s %s=%s,",depth,key(k),v))
- if type(k) == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%s,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v))
- end
- else
- --~ handle(format("%s %s=%q,",depth,key(k),v))
- if type(k) == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
- end
- elseif t == "table" then
- if not next(v) then
- --~ handle(format("%s %s={},",depth,key(k)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
- else
- handle(format("%s [%s]={},",depth,k))
- end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s={},",depth,k))
- else
- handle(format("%s [%q]={},",depth,k))
- end
- elseif inline then
- local st = simple_table(v)
- if st then
- --~ handle(format("%s %s={ %s },",depth,key(k),concat(st,", ")))
- if type(k) == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
- end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- elseif t == "boolean" then
- --~ handle(format("%s %s=%s,",depth,key(k),tostring(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%s,",depth,k,tostring(v)))
- end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%s,",depth,k,tostring(v)))
- end
- elseif t == "function" then
- if functions then
- --~ handle(format('%s %s=loadstring(%q),',depth,key(k),dump(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
- else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
- end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
- else
- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- end
- end
- else
- --~ handle(format("%s %s=%q,",depth,key(k),tostring(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%q,",depth,k,tostring(v)))
- end
- elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%q,",depth,k,tostring(v)))
- end
- end
- --~ end
- end
- end
- if level > 0 then
- handle(format("%s},",depth))
- end
-end
-
--- replacing handle by a direct t[#t+1] = ... (plus test) is not much
--- faster (0.03 on 1.00 for zapfino.tma)
-
-local function serialize(root,name,_handle,_reduce,_noquotes,_hexify)
- noquotes = _noquotes
- hexify = _hexify
- handle = _handle or print
- reduce = _reduce or false
- compact = table.serialize_compact
- inline = compact and table.serialize_inline
- functions = table.serialize_functions
- local tname = type(name)
- if tname == "string" then
- if name == "return" then
- handle("return {")
- else
- handle(name .. "={")
- end
- elseif tname == "number" then
- if hexify then
- handle(format("[0x%04X]={",name))
- else
- handle("[" .. name .. "]={")
- end
- elseif tname == "boolean" then
- if name then
- handle("return {")
- else
- handle("{")
- end
- else
- handle("t={")
- end
- if root and next(root) then
- do_serialize(root,name,"",0,indexed)
- end
- handle("}")
-end
-
---~ name:
---~
---~ true : return { }
---~ false : { }
---~ nil : t = { }
---~ string : string = { }
---~ 'return' : return { }
---~ number : [number] = { }
-
-function table.serialize(root,name,reduce,noquotes,hexify)
- local t = { }
- local function flush(s)
- t[#t+1] = s
- end
- serialize(root,name,flush,reduce,noquotes,hexify)
- return concat(t,"\n")
-end
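-
---~ a tiny example of the generated output (compact mode, the default):
---~
---~ print(table.serialize({ "a", "b", n = 123 },"test"))
---~
---~ test={
---~  "a",
---~  "b",
---~  ["n"]=123,
---~ }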
-
-function table.tohandle(handle,root,name,reduce,noquotes,hexify)
- serialize(root,name,handle,reduce,noquotes,hexify)
-end
-
--- sometimes tables are really big (zapfino extra pro is some 85M) in which
--- case a stepwise serialization is nice; actually, we could consider:
---
--- for line in table.serializer(root,name,reduce,noquotes) do
--- ...(line)
--- end
---
--- so this is on the todo list
-
-table.tofile_maxtab = 2*1024
-
-function table.tofile(filename,root,name,reduce,noquotes,hexify)
- local f = io.open(filename,'w')
- if f then
- local maxtab = table.tofile_maxtab
- if maxtab > 1 then
- local t = { }
- local function flush(s)
- t[#t+1] = s
- if #t > maxtab then
- f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice
- t = { }
- end
- end
- serialize(root,name,flush,reduce,noquotes,hexify)
- f:write(concat(t,"\n"),"\n")
- else
- local function flush(s)
- f:write(s,"\n")
- end
- serialize(root,name,flush,reduce,noquotes,hexify)
- end
- f:close()
- end
-end
-
-local function flatten(t,f,complete) -- is this used? maybe a variant with next, ...
- for i=1,#t do
- local v = t[i]
- if type(v) == "table" then
- if complete or type(v[1]) == "table" then
- flatten(v,f,complete)
- else
- f[#f+1] = v
- end
- else
- f[#f+1] = v
- end
- end
-end
-
-function table.flatten(t)
- local f = { }
- flatten(t,f,true)
- return f
-end
-
-function table.unnest(t) -- bad name
- local f = { }
- flatten(t,f,false)
- return f
-end
-
-table.flatten_one_level = table.unnest
-
--- a better one:
-
-local function flattened(t,f)
- if not f then
- f = { }
- end
- for k, v in next, t do
- if type(v) == "table" then
- flattened(v,f)
- else
- f[k] = v
- end
- end
- return f
-end
-
-table.flattened = flattened
-
--- the next three may disappear
-
-function table.remove_value(t,value) -- todo: n
- if value then
- for i=1,#t do
- if t[i] == value then
- remove(t,i)
- -- remove all, so no: return
- end
- end
- end
-end
-
-function table.insert_before_value(t,value,str)
- if str then
- if value then
- for i=1,#t do
- if t[i] == value then
- insert(t,i,str)
- return
- end
- end
- end
- insert(t,1,str)
- elseif value then
- insert(t,1,value)
- end
-end
-
-function table.insert_after_value(t,value,str)
- if str then
- if value then
- for i=1,#t do
- if t[i] == value then
- insert(t,i+1,str)
- return
- end
- end
- end
- t[#t+1] = str
- elseif value then
- t[#t+1] = value
- end
-end
-
-local function are_equal(a,b,n,m) -- indexed
- if a and b and #a == #b then
- n = n or 1
- m = m or #a
- for i=n,m do
- local ai, bi = a[i], b[i]
- if ai==bi then
- -- same
- elseif type(ai)=="table" and type(bi)=="table" then
- if not are_equal(ai,bi) then
- return false
- end
- else
- return false
- end
- end
- return true
- else
- return false
- end
-end
-
-local function identical(a,b) -- assumes same structure
- for ka, va in next, a do
- local vb = b[ka]
- if va == vb then
- -- same
- elseif type(va) == "table" and type(vb) == "table" then
- if not identical(va,vb) then
- return false
- end
- else
- return false
- end
- end
- return true
-end
-
-table.are_equal = are_equal
-table.identical = identical
-
--- maybe also make a combined one
-
-function table.compact(t)
- if t then
- for k,v in next, t do
- if not next(v) then
- t[k] = nil
- end
- end
- end
-end
-
-function table.contains(t, v)
- if t then
- for i=1, #t do
- if t[i] == v then
- return i
- end
- end
- end
- return false
-end
-
-function table.count(t)
- local n, e = 0, next(t)
- while e do
- n, e = n + 1, next(t,e)
- end
- return n
-end
-
-function table.swapped(t)
- local s = { }
- for k, v in next, t do
- s[v] = k
- end
- return s
-end
-
---~ function table.are_equal(a,b)
---~ return table.serialize(a) == table.serialize(b)
---~ end
-
-function table.clone(t,p) -- t is optional or nil or table
- if not p then
- t, p = { }, t or { }
- elseif not t then
- t = { }
- end
- setmetatable(t, { __index = function(_,key) return p[key] end }) -- why not __index = p ?
- return t
-end
-
-function table.hexed(t,separator)
- local tt = { }
- for i=1,#t do tt[i] = format("0x%04X",t[i]) end
- return concat(tt,separator or " ")
-end
-
-function table.reverse_hash(h)
- local r = { }
- for k,v in next, h do
- r[v] = lower(gsub(k," ",""))
- end
- return r
-end
-
-function table.reverse(t)
- local tt = { }
- if #t > 0 then
- for i=#t,1,-1 do
- tt[#tt+1] = t[i]
- end
- end
- return tt
-end
-
-function table.insert_before_value(t,value,extra)
- for i=1,#t do
- if t[i] == extra then
- remove(t,i)
- end
- end
- for i=1,#t do
- if t[i] == value then
- insert(t,i,extra)
- return
- end
- end
- insert(t,1,extra)
-end
-
-function table.insert_after_value(t,value,extra)
- for i=1,#t do
- if t[i] == extra then
- remove(t,i)
- end
- end
- for i=1,#t do
- if t[i] == value then
- insert(t,i+1,extra)
- return
- end
- end
- insert(t,#t+1,extra)
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-io'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local byte, find, gsub = string.byte, string.find, string.gsub
-
-if string.find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator = "\\", ";"
-else
- io.fileseparator, io.pathseparator = "/" , ":"
-end
-
-function io.loaddata(filename,textmode)
- local f = io.open(filename,(textmode and 'r') or 'rb')
- if f then
- -- collectgarbage("step") -- sometimes makes a big difference in mem consumption
- local data = f:read('*all')
- -- garbagecollector.check(data)
- f:close()
- return data
- else
- return nil
- end
-end
-
-function io.savedata(filename,data,joiner)
- local f = io.open(filename,"wb")
- if f then
- if type(data) == "table" then
- f:write(table.join(data,joiner or ""))
- elseif type(data) == "function" then
- data(f)
- else
- f:write(data or "")
- end
- f:close()
- return true
- else
- return false
- end
-end
-
-function io.exists(filename)
- local f = io.open(filename)
- if f == nil then
- return false
- else
- assert(f:close())
- return true
- end
-end
-
-function io.size(filename)
- local f = io.open(filename)
- if f == nil then
- return 0
- else
- local s = f:seek("end")
- assert(f:close())
- return s
- end
-end
-
-function io.noflines(f)
- local n = 0
- for _ in f:lines() do
- n = n + 1
- end
- f:seek('set',0)
- return n
-end
-
-local nextchar = {
- [ 4] = function(f)
- return f:read(1,1,1,1)
- end,
- [ 2] = function(f)
- return f:read(1,1)
- end,
- [ 1] = function(f)
- return f:read(1)
- end,
- [-2] = function(f)
- local a, b = f:read(1,1)
- return b, a
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- return d, c, b, a
- end
-}
-
-function io.characters(f,n)
- if f then
- return nextchar[n or 1], f
- else
- return nil, nil
- end
-end
-
-local nextbyte = {
- [4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(a), byte(b), byte(c), byte(d)
- else
- return nil, nil, nil, nil
- end
- end,
- [2] = function(f)
- local a, b = f:read(1,1)
- if b then
- return byte(a), byte(b)
- else
- return nil, nil
- end
- end,
- [1] = function (f)
- local a = f:read(1)
- if a then
- return byte(a)
- else
- return nil
- end
- end,
- [-2] = function (f)
- local a, b = f:read(1,1)
- if b then
- return byte(b), byte(a)
- else
- return nil, nil
- end
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(d), byte(c), byte(b), byte(a)
- else
- return nil, nil, nil, nil
- end
- end
-}
-
-function io.bytes(f,n)
- if f then
- return nextbyte[n or 1], f
- else
- return nil, nil
- end
-end
-
-function io.ask(question,default,options)
- while true do
- io.write(question)
- if options then
- io.write(string.format(" [%s]",table.concat(options,"|")))
- end
- if default then
- io.write(string.format(" [%s]",default))
- end
- io.write(string.format(" "))
- local answer = io.read()
- answer = gsub(answer,"^%s*(.*)%s*$","%1")
- if answer == "" and default then
- return default
- elseif not options then
- return answer
- else
- for k=1,#options do
- if options[k] == answer then
- return answer
- end
- end
- local pattern = "^" .. answer
- for k=1,#options do
- local v = options[k]
- if find(v,pattern) then
- return v
- end
- end
- end
- end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-number'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local tostring = tostring
-local format, floor, insert, match = string.format, math.floor, table.insert, string.match
-local lpegmatch = lpeg.match
-
-number = number or { }
-
--- a,b,c,d,e,f = number.toset(100101)
-
-function number.toset(n)
- return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
-end
-
-function number.toevenhex(n)
- local s = format("%X",n)
- if #s % 2 == 0 then
- return s
- else
- return "0" .. s
- end
-end
-
--- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5%
--- on
---
--- for i=1,1000000 do
--- local a,b,c,d,e,f,g,h = number.toset(12345678)
--- local a,b,c,d = number.toset(1234)
--- local a,b,c = number.toset(123)
--- end
---
--- of course dedicated "(.)(.)(.)(.)" matches are even faster
-
-local one = lpeg.C(1-lpeg.S(''))^1
-
-function number.toset(n)
- return lpegmatch(one,tostring(n))
-end
-
-function number.bits(n,zero)
- local t, i = { }, (zero and 0) or 1
- while n > 0 do
- local m = n % 2
- if m > 0 then
- insert(t,1,i)
- end
- n = floor(n/2)
- i = i + 1
- end
- return t
-end
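-
---~ for instance:
---~
---~ print(number.toevenhex(4095))           -- 0FFF
---~ print(table.concat(number.bits(5)," ")) -- 3 1 (highest set bit first)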
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-set'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-set = set or { }
-
-local nums = { }
-local tabs = { }
-local concat = table.concat
-local next, type = next, type
-
-set.create = table.tohash
-
-function set.tonumber(t)
- if next(t) then
- local s = ""
- -- we could save mem by sorting, but it slows down
- for k, v in next, t do
- if v then
- -- why bother about the leading space
- s = s .. " " .. k
- end
- end
- local n = nums[s]
- if not n then
- n = #tabs + 1
- tabs[n] = t
- nums[s] = n
- end
- return n
- else
- return 0
- end
-end
-
-function set.totable(n)
- if n == 0 then
- return { }
- else
- return tabs[n] or { }
- end
-end
-
-function set.tolist(n)
- if n == 0 or not tabs[n] then
- return ""
- else
- local t = { }
- for k, v in next, tabs[n] do
- if v then
- t[#t+1] = k
- end
- end
- return concat(t," ")
- end
-end
-
-function set.contains(n,s)
- if type(n) == "table" then
- return n[s]
- elseif n == 0 then
- return false
- else
- local t = tabs[n]
- return t and t[s]
- end
-end
-
---~ local c = set.create{'aap','noot','mies'}
---~ local s = set.tonumber(c)
---~ local t = set.totable(s)
---~ print(t['aap'])
---~ local c = set.create{'zus','wim','jet'}
---~ local s = set.tonumber(c)
---~ local t = set.totable(s)
---~ print(t['aap'])
---~ print(t['jet'])
---~ print(set.contains(t,'jet'))
---~ print(set.contains(t,'aap'))
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-os'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- maybe build io.flush in os.execute
-
-local find, format, gsub = string.find, string.format, string.gsub
-local random, ceil = math.random, math.ceil
-
-local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush
-
-function os.execute(...) ioflush() return execute(...) end
-function os.spawn (...) ioflush() return spawn (...) end
-function os.exec (...) ioflush() return exec (...) end
-
-function os.resultof(command)
- ioflush() -- else messed up logging
- local handle = io.popen(command,"r")
- if not handle then
- -- print("unknown command '".. command .. "' in os.resultof")
- return ""
- else
- return handle:read("*all") or ""
- end
-end
-
---~ os.type : windows | unix (new, we already guessed os.platform)
---~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
---~ os.platform : extended os.name with architecture
-
-if not io.fileseparator then
- if find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin"
- else
- io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix"
- end
-end
-
-os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
-os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
-
-if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
-else
- os.libsuffix, os.binsuffix = 'so', ''
-end
-
-function os.launch(str)
- if os.type == "windows" then
- os.execute("start " .. str) -- os.spawn ?
- else
- os.execute(str .. " &") -- os.spawn ?
- end
-end
-
-if not os.times then
- -- utime = user time
- -- stime = system time
- -- cutime = children user time
- -- cstime = children system time
- function os.times()
- return {
- utime = os.gettimeofday(), -- user
- stime = 0, -- system
- cutime = 0, -- children user
- cstime = 0, -- children system
- }
- end
-end
-
-os.gettimeofday = os.gettimeofday or os.clock
-
-local startuptime = os.gettimeofday()
-
-function os.runtime()
- return os.gettimeofday() - startuptime
-end
-
---~ print(os.gettimeofday()-os.time())
---~ os.sleep(1.234)
---~ print (">>",os.runtime())
---~ print(os.date("%H:%M:%S",os.gettimeofday()))
---~ print(os.date("%H:%M:%S",os.time()))
-
--- no need for function anymore as we have more clever code and helpers now
--- this metatable trickery might as well disappear
-
-os.resolvers = os.resolvers or { }
-
-local resolvers = os.resolvers
-
-local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil
-local osix = osmt.__index
-
-osmt.__index = function(t,k)
- return (resolvers[k] or osix)(t,k)
-end
-
-setmetatable(os,osmt)
-
-if not os.setenv then
-
- -- we still store them but they won't be seen in
- -- child processes although we might pass them some day
- -- using command concatenation
-
- local env, getenv = { }, os.getenv
-
- function os.setenv(k,v)
- env[k] = v
- end
-
- function os.getenv(k)
- return env[k] or getenv(k)
- end
-
-end
-
--- we can use HOSTTYPE on some platforms
-
-local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or ""
-
-local function guess()
- local architecture = os.resultof("uname -m") or ""
- if architecture ~= "" then
- return architecture
- end
- architecture = os.getenv("HOSTTYPE") or ""
- if architecture ~= "" then
- return architecture
- end
- return os.resultof("echo $HOSTTYPE") or ""
-end
-
-if platform ~= "" then
-
- os.platform = platform
-
-elseif os.type == "windows" then
-
- -- we could set the variable directly, no function needed here
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or ""
- if find(architecture,"AMD64") then
- platform = "mswin-64"
- else
- platform = "mswin"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "linux" then
-
- function os.resolvers.platform(t,k)
- -- we sometimes have HOSTTYPE set so let's check that first
- local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
- platform = "linux-64"
- elseif find(architecture,"ppc") then
- platform = "linux-ppc"
- else
- platform = "linux"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "macosx" then
-
- --[[
- Identifying the architecture of OSX is quite a mess and this
- is the best we can come up with. For some reason $HOSTTYPE is
- a kind of pseudo environment variable, not known to the current
- environment. And yes, uname cannot be trusted either, so there
- is a chance that you end up with a 32 bit run on a 64 bit system.
- Also, some proper 64 bit intel macs are too cheap (low-end) and
- therefore not permitted to run the 64 bit kernel.
- ]]--
-
- function os.resolvers.platform(t,k)
- -- local platform, architecture = "", os.getenv("HOSTTYPE") or ""
- -- if architecture == "" then
- -- architecture = os.resultof("echo $HOSTTYPE") or ""
- -- end
- local platform, architecture = "", os.resultof("echo $HOSTTYPE") or ""
- if architecture == "" then
- -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n")
- platform = "osx-intel"
- elseif find(architecture,"i386") then
- platform = "osx-intel"
- elseif find(architecture,"x86_64") then
- platform = "osx-64"
- else
- platform = "osx-ppc"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "sunos" then
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"sparc") then
- platform = "solaris-sparc"
- else -- if architecture == 'i86pc'
- platform = "solaris-intel"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "freebsd" then
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"amd64") then
- platform = "freebsd-amd64"
- else
- platform = "freebsd"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "kfreebsd" then
-
- function os.resolvers.platform(t,k)
- -- we sometimes have HOSTTYPE set so let's check that first
- local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
- platform = "kfreebsd-64"
- else
- platform = "kfreebsd-i386"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-else
-
- -- platform = "linux"
- -- os.setenv("MTX_PLATFORM",platform)
- -- os.platform = platform
-
- function os.resolvers.platform(t,k)
- local platform = "linux"
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-end
-
--- beware, we set the randomseed
-
--- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the
--- version number as well as two reserved bits. All other bits are set using a random or pseudorandom
--- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal
--- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479.
---
--- as we don't call this function too often there is not much risk of repetition
-
-local t = { 8, 9, "a", "b" }
-
-function os.uuid()
- return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
- random(0xFFFF),random(0xFFFF),
- random(0x0FFF),
- t[ceil(random(4))] or 8,random(0x0FFF),
- random(0xFFFF),
- random(0xFFFF),random(0xFFFF),random(0xFFFF)
- )
-end
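-
---~ print(os.uuid()) -- e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479 (random on every call)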
-
-local d
-
-function os.timezone(delta)
- d = d or tonumber(tonumber(os.date("%H")-os.date("!%H")))
- if delta then
- if d > 0 then
- return format("+%02i:00",d)
- else
- return format("-%02i:00",-d)
- end
- else
- return 1
- end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-file'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- needs a cleanup
-
-file = file or { }
-
-local concat = table.concat
-local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char
-local lpegmatch = lpeg.match
-
-function file.removesuffix(filename)
- return (gsub(filename,"%.[%a%d]+$",""))
-end
-
-function file.addsuffix(filename, suffix)
- if not suffix or suffix == "" then
- return filename
- elseif not find(filename,"%.[%a%d]+$") then
- return filename .. "." .. suffix
- else
- return filename
- end
-end
-
-function file.replacesuffix(filename, suffix)
- return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
-end
-
-function file.dirname(name,default)
- return match(name,"^(.+)[/\\].-$") or (default or "")
-end
-
-function file.basename(name)
- return match(name,"^.+[/\\](.-)$") or name
-end
-
-function file.nameonly(name)
- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
-end
-
-function file.extname(name,default)
- return match(name,"^.+%.([^/\\]-)$") or default or ""
-end
-
-file.suffix = file.extname
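-
---~ the four name accessors at a glance, for instance:
---~
---~ print(file.dirname ("a/b/c.tex")) -- a/b
---~ print(file.basename("a/b/c.tex")) -- c.tex
---~ print(file.nameonly("a/b/c.tex")) -- c
---~ print(file.extname ("a/b/c.tex")) -- tex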
-
---~ function file.join(...)
---~ local pth = concat({...},"/")
---~ pth = gsub(pth,"\\","/")
---~ local a, b = match(pth,"^(.*://)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ a, b = match(pth,"^(//)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ return (gsub(pth,"//+","/"))
---~ end
-
-local trick_1 = char(1)
-local trick_2 = "^" .. trick_1 .. "/+"
-
-function file.join(...)
- local lst = { ... }
- local a, b = lst[1], lst[2]
- if a == "" then
- lst[1] = trick_1
- elseif b and find(a,"^/+$") and find(b,"^/") then
- lst[1] = ""
- lst[2] = gsub(b,"^/+","")
- end
- local pth = concat(lst,"/")
- pth = gsub(pth,"\\","/")
- local a, b = match(pth,"^(.*://)(.*)$")
- if a and b then
- return a .. gsub(b,"//+","/")
- end
- a, b = match(pth,"^(//)(.*)$")
- if a and b then
- return a .. gsub(b,"//+","/")
- end
- pth = gsub(pth,trick_2,"")
- return (gsub(pth,"//+","/"))
-end
-
---~ print(file.join("//","/y"))
---~ print(file.join("/","/y"))
---~ print(file.join("","/y"))
---~ print(file.join("/x/","/y"))
---~ print(file.join("x/","/y"))
---~ print(file.join("http://","/y"))
---~ print(file.join("http://a","/y"))
---~ print(file.join("http:///a","/y"))
---~ print(file.join("//nas-1","/y"))
-
-function file.iswritable(name)
- local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,"."))
- return a and sub(a.permissions,2,2) == "w"
-end
-
-function file.isreadable(name)
- local a = lfs.attributes(name)
- return a and sub(a.permissions,1,1) == "r"
-end
-
-file.is_readable = file.isreadable
-file.is_writable = file.iswritable
-
--- todo: lpeg
-
---~ function file.split_path(str)
---~ local t = { }
---~ str = gsub(str,"\\", "/")
---~ str = gsub(str,"(%a):([;/])", "%1\001%2")
---~ for name in gmatch(str,"([^;:]+)") do
---~ if name ~= "" then
---~ t[#t+1] = gsub(name,"\001",":")
---~ end
---~ end
---~ return t
---~ end
-
-local checkedsplit = string.checkedsplit
-
-function file.split_path(str,separator)
- str = gsub(str,"\\","/")
- return checkedsplit(str,separator or io.pathseparator)
-end
-
-function file.join_path(tab)
- return concat(tab,io.pathseparator) -- can have trailing //
-end
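-
--- a sketch of splitting and rejoining a search path (the separator is passed
--- explicitly here; the round trip assumes io.pathseparator is ":")
-
---~ local parts = file.split_path("/opt/texmf:/usr/share/texmf",":")
---~ print(#parts)                -- 2
---~ print(file.join_path(parts)) -- /opt/texmf:/usr/share/texmf (on a unix-like setup)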
-
--- we can hash them weakly
-
-function file.collapse_path(str)
- str = gsub(str,"\\","/")
- if find(str,"/") then
- str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified
- str = gsub(str,"/%./","/")
- local n, m = 1, 1
- while n > 0 or m > 0 do
- str, n = gsub(str,"[^/%.]+/%.%.$","")
- str, m = gsub(str,"[^/%.]+/%.%./","")
- end
- str = gsub(str,"([^/])/$","%1")
- -- str = gsub(str,"^%./","") -- ./xx in qualified
- str = gsub(str,"/%.$","")
- end
- if str == "" then str = "." end
- return str
-end
-
---~ print(file.collapse_path("/a"))
---~ print(file.collapse_path("a/./b/.."))
---~ print(file.collapse_path("a/aa/../b/bb"))
---~ print(file.collapse_path("a/../.."))
---~ print(file.collapse_path("a/.././././b/.."))
---~ print(file.collapse_path("a/./././b/.."))
---~ print(file.collapse_path("a/b/c/../.."))
-
-function file.robustname(str)
- return (gsub(str,"[^%a%d%/%-%.\\]+","-"))
-end
-
-file.readdata = io.loaddata
-file.savedata = io.savedata
-
-function file.copy(oldname,newname)
- file.savedata(newname,io.loaddata(oldname))
-end
-
--- lpeg variants, slightly faster, not always
-
---~ local period = lpeg.P(".")
---~ local slashes = lpeg.S("\\/")
---~ local noperiod = 1-period
---~ local noslashes = 1-slashes
---~ local name = noperiod^1
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1
-
---~ function file.extname(name)
---~ return lpegmatch(pattern,name) or ""
---~ end
-
---~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1)
-
---~ function file.removesuffix(name)
---~ return lpegmatch(pattern,name)
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1
-
---~ function file.basename(name)
---~ return lpegmatch(pattern,name) or name
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1
-
---~ function file.dirname(name)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2)
---~ else
---~ return ""
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
-
---~ function file.addsuffix(name, suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return name
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
-
---~ function file.replacesuffix(name,suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2) .. "." .. suffix
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1
-
---~ function file.nameonly(name)
---~ local a, b = lpegmatch(pattern,name)
---~ if b then
---~ return sub(name,a,b-2)
---~ elseif a then
---~ return sub(name,a)
---~ else
---~ return name
---~ end
---~ end
-
---~ local test = file.extname
---~ local test = file.basename
---~ local test = file.dirname
---~ local test = file.addsuffix
---~ local test = file.replacesuffix
---~ local test = file.nameonly
-
---~ print(1,test("./a/b/c/abd.def.xxx","!!!"))
---~ print(2,test("./../b/c/abd.def.xxx","!!!"))
---~ print(3,test("a/b/c/abd.def.xxx","!!!"))
---~ print(4,test("a/b/c/def.xxx","!!!"))
---~ print(5,test("a/b/c/def","!!!"))
---~ print(6,test("def","!!!"))
---~ print(7,test("def.xxx","!!!"))
-
---~ local tim = os.clock() for i=1,250000 do local ext = test("abd.def.xxx","!!!") end print(os.clock()-tim)
-
--- also rewrite previous
-
-local letter = lpeg.R("az","AZ") + lpeg.S("_-+")
-local separator = lpeg.P("://")
-
-local qualified = lpeg.P(".")^0 * lpeg.P("/") + letter*lpeg.P(":") + letter^1*separator + letter^1 * lpeg.P("/")
-local rootbased = lpeg.P("/") + letter*lpeg.P(":")
-
--- ./name ../name /name c: :// name/name
-
-function file.is_qualified_path(filename)
- return lpegmatch(qualified,filename) ~= nil
-end
-
-function file.is_rootbased_path(filename)
- return lpegmatch(rootbased,filename) ~= nil
-end
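-
--- a few illustrative checks, matching the forms listed above (sketch)
-
---~ print(file.is_qualified_path("./name"))  -- true
---~ print(file.is_qualified_path("c:/name")) -- true
---~ print(file.is_qualified_path("name"))    -- false
---~ print(file.is_rootbased_path("/name"))   -- true
---~ print(file.is_rootbased_path("name"))    -- false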
-
-local slash = lpeg.S("\\/")
-local period = lpeg.P(".")
-local drive = lpeg.C(lpeg.R("az","AZ")) * lpeg.P(":")
-local path = lpeg.C(((1-slash)^0 * slash)^0)
-local suffix = period * lpeg.C(lpeg.P(1-period)^0 * lpeg.P(-1))
-local base = lpeg.C((1-suffix)^0)
-
-local pattern = (drive + lpeg.Cc("")) * (path + lpeg.Cc("")) * (base + lpeg.Cc("")) * (suffix + lpeg.Cc(""))
-
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
-end
-
--- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
---
--- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
--- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" }
--- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
--- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
-
---~ -- todo:
---~
---~ if os.type == "windows" then
---~ local currentdir = lfs.currentdir
---~ function lfs.currentdir()
---~ return (gsub(currentdir(),"\\","/"))
---~ end
---~ end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-md5'] = {
- version = 1.001,
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This also provides file checksums and checkers.
-
-local gsub, format, byte = string.gsub, string.format, string.byte
-
-local function convert(str,fmt)
- return (gsub(md5.sum(str),".",function(chr) return format(fmt,byte(chr)) end))
-end
-
-if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
-if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
-if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
-
---~ if not md5.HEX then
---~ local function remap(chr) return format("%02X",byte(chr)) end
---~ function md5.HEX(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
---~ if not md5.hex then
---~ local function remap(chr) return format("%02x",byte(chr)) end
---~ function md5.hex(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
---~ if not md5.dec then
---~ local function remap(chr) return format("%03i",byte(chr)) end
---~ function md5.dec(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
-
-file.needs_updating_threshold = 1
-
-function file.needs_updating(oldname,newname) -- size modification access change
- local oldtime = lfs.attributes(oldname, "modification")
- local newtime = lfs.attributes(newname, "modification")
- if newtime >= oldtime then
- return false
- elseif oldtime - newtime < file.needs_updating_threshold then
- return false
- else
- return true
- end
-end
-
-function file.checksum(name)
- if md5 then
- local data = io.loaddata(name)
- if data then
- return md5.HEX(data)
- end
- end
- return nil
-end
-
-function file.loadchecksum(name)
- if md5 then
- local data = io.loaddata(name .. ".md5")
- return data and (gsub(data,"%s",""))
- end
- return nil
-end
-
-function file.savechecksum(name, checksum)
- if not checksum then checksum = file.checksum(name) end
- if checksum then
- io.savedata(name .. ".md5",checksum)
- return checksum
- end
- return nil
-end
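-
--- a sketch of how these combine (file name made up, assumes the md5 library is present)
-
---~ local sum = file.checksum("somefile.tex")       -- hex md5 of the content, or nil
---~ file.savechecksum("somefile.tex")               -- writes somefile.tex.md5
---~ print(file.loadchecksum("somefile.tex") == sum) -- true as long as nothing changed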
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-url'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local char, gmatch, gsub = string.char, string.gmatch, string.gsub
-local tonumber, type = tonumber, type
-local lpegmatch = lpeg.match
-
--- from the spec (on the web):
---
---      foo://example.com:8042/over/there?name=ferret#nose
---      \_/   \______________/\_________/ \_________/ \__/
---       |           |            |            |        |
---    scheme     authority       path        query   fragment
---       |   _____________________|__
---      / \ /                        \
---      urn:example:animal:ferret:nose
-
-url = url or { }
-
-local function tochar(s)
- return char(tonumber(s,16))
-end
-
-local colon, qmark, hash, slash, percent, endofstring = lpeg.P(":"), lpeg.P("?"), lpeg.P("#"), lpeg.P("/"), lpeg.P("%"), lpeg.P(-1)
-
-local hexdigit = lpeg.R("09","AF","af")
-local plus = lpeg.P("+")
-local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar)
-
--- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-
-local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("")
-local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("")
-local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("")
-local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("")
-local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0) + lpeg.Cc("")
-
-local parser = lpeg.Ct(scheme * authority * path * query * fragment)
-
--- todo: reconsider Ct as we could just as well return five values (saves a table);
--- then we can have two parsers, one with and one without a table
-
-function url.split(str)
- return (type(str) == "string" and lpegmatch(parser,str)) or str
-end
-
--- todo: cache them
-
-function url.hashed(str)
- local s = url.split(str)
- local somescheme = s[1] ~= ""
- return {
- scheme = (somescheme and s[1]) or "file",
- authority = s[2],
- path = s[3],
- query = s[4],
- fragment = s[5],
- original = str,
- noscheme = not somescheme,
- }
-end
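-
--- sketch: for the spec example above the hash comes out roughly as
--- scheme="foo", authority="example.com:8042", path="over/there",
--- query="name=ferret", fragment="nose", noscheme=false
-
---~ print(table.serialize(url.hashed("foo://example.com:8042/over/there?name=ferret#nose")))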
-
-function url.hasscheme(str)
- return url.split(str)[1] ~= ""
-end
-
-function url.addscheme(str,scheme)
- return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str)
-end
-
-function url.construct(hash)
- local fullurl = hash.scheme .. "://".. hash.authority .. hash.path
- if hash.query then
- fullurl = fullurl .. "?".. hash.query
- end
- if hash.fragment then
- fullurl = fullurl .. "?".. hash.fragment
- end
- return fullurl
-end
-
-function url.filename(filename)
- local t = url.hashed(filename)
- return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename
-end
-
-function url.query(str)
- if type(str) == "string" then
- local t = { }
- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do
- t[k] = v
- end
- return t
- else
- return str
- end
-end
-
---~ print(url.filename("file:///c:/oeps.txt"))
---~ print(url.filename("c:/oeps.txt"))
---~ print(url.filename("file:///oeps.txt"))
---~ print(url.filename("file:///etc/test.txt"))
---~ print(url.filename("/oeps.txt"))
-
---~ from the spec on the web (sort of):
---~
---~ function test(str)
---~ print(table.serialize(url.hashed(str)))
---~ end
---~
---~ test("%56pass%20words")
---~ test("file:///c:/oeps.txt")
---~ test("file:///c|/oeps.txt")
---~ test("file:///etc/oeps.txt")
---~ test("file://./etc/oeps.txt")
---~ test("file:////etc/oeps.txt")
---~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt")
---~ test("http://www.ietf.org/rfc/rfc2396.txt")
---~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what")
---~ test("mailto:John.Doe@example.com")
---~ test("news:comp.infosystems.www.servers.unix")
---~ test("tel:+1-816-555-1212")
---~ test("telnet://192.0.2.16:80/")
---~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2")
---~ test("/etc/passwords")
---~ test("http://www.pragma-ade.com/spaced%20name")
-
---~ test("zip:///oeps/oeps.zip#bla/bla.tex")
---~ test("zip:///oeps/oeps.zip?bla/bla.tex")
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-dir'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- dir.expand_name will be merged with cleanpath and collapsepath
-
-local type = type
-local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
-local lpegmatch = lpeg.match
-
-dir = dir or { }
-
--- handy
-
-function dir.current()
- return (gsub(lfs.currentdir(),"\\","/"))
-end
-
--- optimizing for no string.find (*) does not save time
-
-local attributes = lfs.attributes
-local walkdir = lfs.dir
-
-local function glob_pattern(path,patt,recurse,action)
- local ok, scanner
- if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
- else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
- end
- if ok and type(scanner) == "function" then
- if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
- local full = path .. name
- local mode = attributes(full,'mode')
- if mode == 'file' then
- if find(full,patt) then
- action(full)
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- glob_pattern(full,patt,recurse,action)
- end
- end
- end
-end
-
-dir.glob_pattern = glob_pattern
-
-local function collect_pattern(path,patt,recurse,result)
- local ok, scanner
- result = result or { }
- if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
- else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
- end
- if ok and type(scanner) == "function" then
- if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
- local full = path .. name
- local attr = attributes(full)
- local mode = attr.mode
- if mode == 'file' then
- if find(full,patt) then
- result[name] = attr
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- attr.list = collect_pattern(full,patt,recurse)
- result[name] = attr
- end
- end
- end
- return result
-end
-
-dir.collect_pattern = collect_pattern
-
-local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V
-
-local pattern = Ct {
- [1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3),
- [2] = C(((1-S("*?/"))^0 * P("/"))^0),
- [3] = C(P(1)^0)
-}
-
-local filter = Cs ( (
- P("**") / ".*" +
- P("*") / "[^/]*" +
- P("?") / "[^/]" +
- P(".") / "%%." +
- P("+") / "%%+" +
- P("-") / "%%-" +
- P(1)
-)^0 )
-
-local function glob(str,t)
- if type(t) == "function" then
- if type(str) == "table" then
- for s=1,#str do
- glob(str[s],t)
- end
- elseif lfs.isfile(str) then
- t(str)
- else
- local split = lpegmatch(pattern,str)
- if split then
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = lpegmatch(filter,start .. base)
- glob_pattern(start,result,recurse,t)
- end
- end
- else
- if type(str) == "table" then
- local t = t or { }
- for s=1,#str do
- glob(str[s],t)
- end
- return t
- elseif lfs.isfile(str) then
- local t = t or { }
- t[#t+1] = str
- return t
- else
- local split = lpegmatch(pattern,str)
- if split then
- local t = t or { }
- local action = action or function(name) t[#t+1] = name end
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = lpegmatch(filter,start .. base)
- glob_pattern(start,result,recurse,action)
- return t
- else
- return { }
- end
- end
- end
-end
-
-dir.glob = glob
-
---~ list = dir.glob("**/*.tif")
---~ list = dir.glob("/**/*.tif")
---~ list = dir.glob("./**/*.tif")
---~ list = dir.glob("oeps/**/*.tif")
---~ list = dir.glob("/oeps/**/*.tif")
-
-local function globfiles(path,recurse,func,files) -- func == pattern or function
- if type(func) == "string" then
- local s = func -- alas, we need this indirect way
- func = function(name) return find(name,s) end
- end
- files = files or { }
- for name in walkdir(path) do
- if find(name,"^%.") then
- --- skip
- else
- local mode = attributes(path .. "/" .. name,'mode')
- if mode == "directory" then
- if recurse then
- globfiles(path .. "/" .. name,recurse,func,files)
- end
- elseif mode == "file" then
- if func then
- if func(name) then
- files[#files+1] = path .. "/" .. name
- end
- else
- files[#files+1] = path .. "/" .. name
- end
- end
- end
- end
- return files
-end
-
-dir.globfiles = globfiles
-
--- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex")
--- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex")
--- t = dir.glob("c:/data/develop/context/texmf/**/*.tex")
--- t = dir.glob("f:/minimal/tex/**/*")
--- print(dir.ls("f:/minimal/tex/**/*"))
--- print(dir.ls("*.tex"))
-
-function dir.ls(pattern)
- return table.concat(glob(pattern),"\n")
-end
-
---~ mkdirs("temp")
---~ mkdirs("a/b/c")
---~ mkdirs(".","/a/b/c")
---~ mkdirs("a","b","c")
-
-local make_indeed = true -- false
-
-if string.find(os.getenv("PATH"),";") then -- os.type == "windows"
-
- function dir.mkdirs(...)
- local str, pth, t = "", "", { ... }
- for i=1,#t do
- local s = t[i]
- if s ~= "" then
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
- end
- end
- end
- local first, middle, last
- local drive = false
- first, middle, last = match(str,"^(//)(//*)(.*)$")
- if first then
- -- empty network path == local path
- else
- first, last = match(str,"^(//)/*(.-)$")
- if first then
- middle, last = match(str,"([^/]+)/+(.-)$")
- if middle then
- pth = "//" .. middle
- else
- pth = "//" .. last
- last = ""
- end
- else
- first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$")
- if first then
- pth, drive = first .. middle, true
- else
- middle, last = match(str,"^(/*)(.-)$")
- if not middle then
- last = str
- end
- end
- end
- end
- for s in gmatch(last,"[^/]+") do
- if pth == "" then
- pth = s
- elseif drive then
- pth, drive = pth .. s, false
- else
- pth = pth .. "/" .. s
- end
- if make_indeed and not lfs.isdir(pth) then
- lfs.mkdir(pth)
- end
- end
- return pth, (lfs.isdir(pth) == true)
- end
-
---~ print(dir.mkdirs("","","a","c"))
---~ print(dir.mkdirs("a"))
---~ print(dir.mkdirs("a:"))
---~ print(dir.mkdirs("a:/b/c"))
---~ print(dir.mkdirs("a:b/c"))
---~ print(dir.mkdirs("a:/bbb/c"))
---~ print(dir.mkdirs("/a/b/c"))
---~ print(dir.mkdirs("/aaa/b/c"))
---~ print(dir.mkdirs("//a/b/c"))
---~ print(dir.mkdirs("///a/b/c"))
---~ print(dir.mkdirs("a/bbb//ccc/"))
-
- function dir.expand_name(str) -- will be merged with cleanpath and collapsepath
- local first, nothing, last = match(str,"^(//)(//*)(.*)$")
- if first then
- first = dir.current() .. "/"
- end
- if not first then
- first, last = match(str,"^(//)/*(.*)$")
- end
- if not first then
- first, last = match(str,"^([a-zA-Z]:)(.*)$")
- if first and not find(last,"^/") then
- local d = lfs.currentdir()
- if lfs.chdir(first) then
- first = dir.current()
- end
- lfs.chdir(d)
- end
- end
- if not first then
- first, last = dir.current(), str
- end
- last = gsub(last,"//","/")
- last = gsub(last,"/%./","/")
- last = gsub(last,"^/*","")
- first = gsub(first,"/*$","")
- if last == "" then
- return first
- else
- return first .. "/" .. last
- end
- end
-
-else
-
- function dir.mkdirs(...)
- local str, pth, t = "", "", { ... }
- for i=1,#t do
- local s = t[i]
- if s ~= "" then
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
- end
- end
- end
- str = gsub(str,"/+","/")
- if find(str,"^/") then
- pth = "/"
- for s in gmatch(str,"[^/]+") do
- local first = (pth == "/")
- if first then
- pth = pth .. s
- else
- pth = pth .. "/" .. s
- end
- if make_indeed and not first and not lfs.isdir(pth) then
- lfs.mkdir(pth)
- end
- end
- else
- pth = "."
- for s in gmatch(str,"[^/]+") do
- pth = pth .. "/" .. s
- if make_indeed and not lfs.isdir(pth) then
- lfs.mkdir(pth)
- end
- end
- end
- return pth, (lfs.isdir(pth) == true)
- end
-
---~ print(dir.mkdirs("","","a","c"))
---~ print(dir.mkdirs("a"))
---~ print(dir.mkdirs("/a/b/c"))
---~ print(dir.mkdirs("/aaa/b/c"))
---~ print(dir.mkdirs("//a/b/c"))
---~ print(dir.mkdirs("///a/b/c"))
---~ print(dir.mkdirs("a/bbb//ccc/"))
-
- function dir.expand_name(str) -- will be merged with cleanpath and collapsepath
- if not find(str,"^/") then
- str = lfs.currentdir() .. "/" .. str
- end
- str = gsub(str,"//","/")
- str = gsub(str,"/%./","/")
- return str
- end
-
-end
-
-dir.makedirs = dir.mkdirs
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-boolean'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-boolean = boolean or { }
-
-local type, tonumber = type, tonumber
-
-function boolean.tonumber(b)
- if b then return 1 else return 0 end
-end
-
-function toboolean(str,tolerant)
- if tolerant then
- local tstr = type(str)
- if tstr == "string" then
- return str == "true" or str == "yes" or str == "on" or str == "1" or str == "t"
- elseif tstr == "number" then
- return tonumber(str) ~= 0
- elseif tstr == "nil" then
- return false
- else
- return str
- end
- elseif str == "true" then
- return true
- elseif str == "false" then
- return false
- else
- return str
- end
-end
-
-function string.is_boolean(str)
- if type(str) == "string" then
- if str == "true" or str == "yes" or str == "on" or str == "t" then
- return true
- elseif str == "false" or str == "no" or str == "off" or str == "f" then
- return false
- end
- end
- return nil
-end
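-
--- a few illustrative calls (sketch)
-
---~ print(toboolean("yes"))          -- "yes" (only "true"/"false" are handled in strict mode)
---~ print(toboolean("yes",true))     -- true  (tolerant mode)
---~ print(string.is_boolean("off"))  -- false
---~ print(string.is_boolean("oeps")) -- nil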
-
-function boolean.alwaystrue()
- return true
-end
-
-function boolean.falsetrue()
- return false
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-unicode'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if not unicode then
-
- unicode = { utf8 = { } }
-
- local floor, char = math.floor, string.char
-
- function unicode.utf8.utfchar(n)
- if n < 0x80 then
- return char(n)
- elseif n < 0x800 then
- return char(0xC0 + floor(n/0x40)) .. char(0x80 + (n % 0x40))
- elseif n < 0x10000 then
- return char(0xE0 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40))
- elseif n < 0x40000 then
- return char(0xF0 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40))
- else -- wrong:
- -- return char(0xF1 + floor(n/0x1000000)) .. char(0x80 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40))
- return "?"
- end
- end
-
-end
-
-utf = utf or unicode.utf8
-
-local concat, utfchar, utfgsub = table.concat, utf.char, utf.gsub
-local char, byte, find, bytepairs = string.char, string.byte, string.find, string.bytepairs
-
--- 0  EF BB BF      UTF-8
--- 1  FF FE         UTF-16-little-endian
--- 2  FE FF         UTF-16-big-endian
--- 3  FF FE 00 00   UTF-32-little-endian
--- 4  00 00 FE FF   UTF-32-big-endian
-
-unicode.utfname = {
- [0] = 'utf-8',
- [1] = 'utf-16-le',
- [2] = 'utf-16-be',
- [3] = 'utf-32-le',
- [4] = 'utf-32-be'
-}
-
--- \000 fails in <= 5.0 but is valid in >=5.1 where %z is deprecated
-
-function unicode.utftype(f)
- local str = f:read(4)
- if not str then
- f:seek('set')
- return 0
- -- elseif find(str,"^%z%z\254\255") then -- depricated
- -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged
- elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH)
- return 4
- -- elseif find(str,"^\255\254%z%z") then -- depricated
- -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged
- elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH)
- return 3
- elseif find(str,"^\254\255") then
- f:seek('set',2)
- return 2
- elseif find(str,"^\255\254") then
- f:seek('set',2)
- return 1
- elseif find(str,"^\239\187\191") then
- f:seek('set',3)
- return 0
- else
- f:seek('set')
- return 0
- end
-end
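-
--- a sketch of how the detected type can drive the conversion (file name made up);
--- utftype also repositions the handle just after a utf-8 or utf-16 bom
-
---~ local f = io.open("somefile.txt","rb")
---~ if f then
---~ local kind = unicode.utftype(f)
---~ local data = f:read("*all")
---~ f:close()
---~ if kind == 1 then
---~ data = table.concat(unicode.utf16_to_utf8(data,false),"\n") -- little endian
---~ elseif kind == 2 then
---~ data = table.concat(unicode.utf16_to_utf8(data,true),"\n")  -- big endian
---~ end
---~ end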
-
-function unicode.utf16_to_utf8(str, endian) -- maybe a gsub is faster or an lpeg
- local result, tmp, n, m, p = { }, { }, 0, 0, 0
- -- lf | cr | crlf / (cr:13, lf:10)
- local function doit()
- if n == 10 then
- if p ~= 13 then
- result[#result+1] = concat(tmp)
- tmp = { }
- p = 0
- end
- elseif n == 13 then
- result[#result+1] = concat(tmp)
- tmp = { }
- p = n
- else
- tmp[#tmp+1] = utfchar(n)
- p = 0
- end
- end
- for l,r in bytepairs(str) do
- if r then
- if endian then
- n = l*256 + r
- else
- n = r*256 + l
- end
- if m > 0 then
- n = (m-0xD800)*0x400 + (n-0xDC00) + 0x10000
- m = 0
- doit()
- elseif n >= 0xD800 and n <= 0xDBFF then
- m = n
- else
- doit()
- end
- end
- end
- if #tmp > 0 then
- result[#result+1] = concat(tmp)
- end
- return result
-end
-
-function unicode.utf32_to_utf8(str, endian)
- local result = { }
- local tmp, n, m, p = { }, 0, -1, 0
- -- lf | cr | crlf / (cr:13, lf:10)
- local function doit()
- if n == 10 then
- if p ~= 13 then
- result[#result+1] = concat(tmp)
- tmp = { }
- p = 0
- end
- elseif n == 13 then
- result[#result+1] = concat(tmp)
- tmp = { }
- p = n
- else
- tmp[#tmp+1] = utfchar(n)
- p = 0
- end
- end
- for a,b in bytepairs(str) do
- if a and b then
- if m < 0 then
- if endian then
- m = a*256*256*256 + b*256*256
- else
- m = b*256 + a
- end
- else
- if endian then
- n = m + a*256 + b
- else
- n = m + b*256*256*256 + a*256*256
- end
- m = -1
- doit()
- end
- else
- break
- end
- end
- if #tmp > 0 then
- result[#result+1] = concat(tmp)
- end
- return result
-end
-
-local function little(c)
- local b = byte(c) -- b = c:byte()
- if b < 0x10000 then
- return char(b%256,b/256)
- else
- b = b - 0x10000
- local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
- return char(b1%256,b1/256,b2%256,b2/256)
- end
-end
-
-local function big(c)
- local b = byte(c)
- if b < 0x10000 then
- return char(b/256,b%256)
- else
- b = b - 0x10000
- local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
- return char(b1/256,b1%256,b2/256,b2%256)
- end
-end
-
-function unicode.utf8_to_utf16(str,littleendian)
- if littleendian then
- return char(255,254) .. utfgsub(str,".",little)
- else
- return char(254,255) .. utfgsub(str,".",big)
- end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-math'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
-
-if not math.round then
- function math.round(x)
- return floor(x + 0.5)
- end
-end
-
-if not math.div then
- function math.div(n,m)
- return floor(n/m)
- end
-end
-
-if not math.mod then
- function math.mod(n,m)
- return n % m
- end
-end
-
-local pipi = 2*math.pi/360
-
-function math.sind(d)
- return sin(d*pipi)
-end
-
-function math.cosd(d)
- return cos(d*pipi)
-end
-
-function math.tand(d)
- return tan(d*pipi)
-end
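-
--- sketch: the *d variants take degrees instead of radians
-
---~ print(math.sind(90))  -- 1
---~ print(math.cosd(180)) -- -1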
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-utils'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- hm, quite unreadable
-
-local gsub = string.gsub
-local concat = table.concat
-local type, next = type, next
-
-if not utils then utils = { } end
-if not utils.merger then utils.merger = { } end
-if not utils.lua then utils.lua = { } end
-
-utils.merger.m_begin = "begin library merge"
-utils.merger.m_end = "end library merge"
-utils.merger.pattern =
- "%c+" ..
- "%-%-%s+" .. utils.merger.m_begin ..
- "%c+(.-)%c+" ..
- "%-%-%s+" .. utils.merger.m_end ..
- "%c+"
-
-function utils.merger._self_fake_()
- return
- "-- " .. "created merged file" .. "\n\n" ..
- "-- " .. utils.merger.m_begin .. "\n\n" ..
- "-- " .. utils.merger.m_end .. "\n\n"
-end
-
-function utils.report(...)
- print(string.format(...)) -- format so that the "%s" placeholders used by the callers get filled in
-end
-
-utils.merger.strip_comment = true
-
-function utils.merger._self_load_(name)
- local f, data = io.open(name), ""
- if f then
- utils.report("reading merge from %s",name)
- data = f:read("*all")
- f:close()
- else
- utils.report("unknown file to merge %s",name)
- end
- if data and utils.merger.strip_comment then
- -- saves some 20K
- data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "")
- end
- return data or ""
-end
-
-function utils.merger._self_save_(name, data)
- if data ~= "" then
- local f = io.open(name,'w')
- if f then
- utils.report("saving merge from %s",name)
- f:write(data)
- f:close()
- end
- end
-end
-
-function utils.merger._self_swap_(data,code)
- if data ~= "" then
- return (gsub(data,utils.merger.pattern, function(s)
- return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end .. "\n\n"
- end, 1))
- else
- return ""
- end
-end
-
---~ stripper:
---~
---~ data = gsub(data,"%-%-~[^\n]*\n","")
---~ data = gsub(data,"\n\n+","\n")
-
-function utils.merger._self_libs_(libs,list)
- local result, f, frozen = { }, nil, false
- result[#result+1] = "\n"
- if type(libs) == 'string' then libs = { libs } end
- if type(list) == 'string' then list = { list } end
- local foundpath = nil
- for i=1,#libs do
- local lib = libs[i]
- for j=1,#list do
- local pth = gsub(list[j],"\\","/") -- file.clean_path
- utils.report("checking library path %s",pth)
- local name = pth .. "/" .. lib
- if lfs.isfile(name) then
- foundpath = pth
- end
- end
- if foundpath then break end
- end
- if foundpath then
- utils.report("using library path %s",foundpath)
- local right, wrong = { }, { }
- for i=1,#libs do
- local lib = libs[i]
- local fullname = foundpath .. "/" .. lib
- if lfs.isfile(fullname) then
- right[#right+1] = lib
- utils.report("merging library %s",fullname)
- result[#result+1] = "do -- create closure to overcome 200 locals limit"
- result[#result+1] = io.loaddata(fullname,true)
- result[#result+1] = "end -- of closure"
- else
- wrong[#wrong+1] = lib
- utils.report("no library %s",fullname)
- end
- end
- if #right > 0 then
- utils.report("merged libraries: %s",concat(right," "))
- end
- if #wrong > 0 then
- utils.report("skipped libraries: %s",concat(wrong," "))
- end
- else
- utils.report("no valid library path found")
- end
- return concat(result, "\n\n")
-end
-
-function utils.merger.selfcreate(libs,list,target)
- if target then
- utils.merger._self_save_(
- target,
- utils.merger._self_swap_(
- utils.merger._self_fake_(),
- utils.merger._self_libs_(libs,list)
- )
- )
- end
-end
-
-function utils.merger.selfmerge(name,libs,list,target)
- utils.merger._self_save_(
- target or name,
- utils.merger._self_swap_(
- utils.merger._self_load_(name),
- utils.merger._self_libs_(libs,list)
- )
- )
-end
-
-function utils.merger.selfclean(name)
- utils.merger._self_save_(
- name,
- utils.merger._self_swap_(
- utils.merger._self_load_(name),
- ""
- )
- )
-end
-
-function utils.lua.compile(luafile, lucfile, cleanup, strip) -- defaults: cleanup=false strip=true
- -- utils.report("compiling",luafile,"into",lucfile)
- os.remove(lucfile)
- local command = "-o " .. string.quote(lucfile) .. " " .. string.quote(luafile)
- if strip ~= false then
- command = "-s " .. command
- end
- local done = (os.spawn("texluac " .. command) == 0) or (os.spawn("luac " .. command) == 0)
- if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- -- utils.report("removing",luafile)
- os.remove(luafile)
- end
- return done
-end
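-
--- a typical call (sketch, file names made up): strip debug info, keep the lua source
-
---~ utils.lua.compile("somefile.lua","somefile.luc",false,true)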
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-aux'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- for inline, no store split : for s in string.gmatch(str,",* *([^,]+)") do .. end
-
-aux = aux or { }
-
-local concat, format, gmatch = table.concat, string.format, string.gmatch
-local tostring, type = tostring, type
-local lpegmatch = lpeg.match
-
-local P, R, V = lpeg.P, lpeg.R, lpeg.V
-
-local escape, left, right = P("\\"), P('{'), P('}')
-
-lpeg.patterns.balanced = P {
- [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
- [2] = left * V(1) * right
-}
-
-local space = lpeg.P(' ')
-local equal = lpeg.P("=")
-local comma = lpeg.P(",")
-local lbrace = lpeg.P("{")
-local rbrace = lpeg.P("}")
-local nobrace = 1 - (lbrace+rbrace)
-local nested = lpeg.P { lbrace * (nobrace + lpeg.V(1))^0 * rbrace }
-local spaces = space^0
-
-local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0)
-
-local key = lpeg.C((1-equal-comma)^1)
-local pattern_a = (space+comma)^0 * (key * equal * value + key * lpeg.C(""))
-local pattern_c = (space+comma)^0 * (key * equal * value)
-
-local key = lpeg.C((1-space-equal-comma)^1)
-local pattern_b = spaces * comma^0 * spaces * (key * ((spaces * equal * spaces * value) + lpeg.C("")))
-
--- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored
-
-local hash = { }
-
-local function set(key,value) -- using Carg is slower here
- hash[key] = value
-end
-
-local pattern_a_s = (pattern_a/set)^1
-local pattern_b_s = (pattern_b/set)^1
-local pattern_c_s = (pattern_c/set)^1
-
-aux.settings_to_hash_pattern_a = pattern_a_s
-aux.settings_to_hash_pattern_b = pattern_b_s
-aux.settings_to_hash_pattern_c = pattern_c_s
-
-function aux.make_settings_to_hash_pattern(set,how)
- if how == "strict" then
- return (pattern_c/set)^1
- elseif how == "tolerant" then
- return (pattern_b/set)^1
- else
- return (pattern_a/set)^1
- end
-end
-
-function aux.settings_to_hash(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- if moretolerant then
- lpegmatch(pattern_b_s,str)
- else
- lpegmatch(pattern_a_s,str)
- end
- return hash
- else
- return { }
- end
-end
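-
--- sketch: for instance (outer braces around a value are stripped, as noted above)
-
---~ print(table.serialize(aux.settings_to_hash("a=1, b=2, d={a{b,c}d}, g={}")))
---~ -- gives { a = "1", b = "2", d = "a{b,c}d", g = "" }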
-
-function aux.settings_to_hash_tolerant(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_b_s,str)
- return hash
- else
- return { }
- end
-end
-
-function aux.settings_to_hash_strict(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_c_s,str)
- return next(hash) and hash
- else
- return nil
- end
-end
-
-local separator = comma * space^0
-local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0)
-local pattern = lpeg.Ct(value*(separator*value)^0)
-
--- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
-
-aux.settings_to_array_pattern = pattern
-
--- we could use a weak table as cache
-
-function aux.settings_to_array(str)
- if not str or str == "" then
- return { }
- else
- return lpegmatch(pattern,str)
- end
-end
-
-local function set(t,v)
- t[#t+1] = v
-end
-
-local value = lpeg.P(lpeg.Carg(1)*value) / set
-local pattern = value*(separator*value)^0 * lpeg.Carg(1)
-
-function aux.add_settings_to_array(t,str)
- return lpegmatch(pattern,str,nil,t)
-end
-
-function aux.hash_to_string(h,separator,yes,no,strict,omit)
- if h then
- local t, s = { }, table.sortedkeys(h)
- omit = omit and table.tohash(omit)
- for i=1,#s do
- local key = s[i]
- if not omit or not omit[key] then
- local value = h[key]
- if type(value) == "boolean" then
- if yes and no then
- if value then
- t[#t+1] = key .. '=' .. yes
- elseif not strict then
- t[#t+1] = key .. '=' .. no
- end
- elseif value or not strict then
- t[#t+1] = key .. '=' .. tostring(value)
- end
- else
- t[#t+1] = key .. '=' .. value
- end
- end
- end
- return concat(t,separator or ",")
- else
- return ""
- end
-end
-
-function aux.array_to_string(a,separator)
- if a then
- return concat(a,separator or ",")
- else
- return ""
- end
-end
-
-function aux.settings_to_set(str,t)
- t = t or { }
- for s in gmatch(str,"%s*([^,]+)") do
- t[s] = true
- end
- return t
-end
-
-local value = lbrace * lpeg.C((nobrace + nested)^0) * rbrace
-local pattern = lpeg.Ct((space + value)^0)
-
-function aux.arguments_to_table(str)
- return lpegmatch(pattern,str)
-end
-
--- temporary here
-
-function aux.getparameters(self,class,parentclass,settings)
- local sc = self[class]
- if not sc then
- sc = table.clone(self[parentclass])
- self[class] = sc
- end
- aux.settings_to_hash(settings,sc)
-end
-
--- temporary here
-
-local digit = lpeg.R("09")
-local period = lpeg.P(".")
-local zero = lpeg.P("0")
-local trailingzeros = zero^0 * -digit -- suggested by Roberto R
-local case_1 = period * trailingzeros / ""
-local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
-local number = digit^1 * (case_1 + case_2)
-local stripper = lpeg.Cs((number + 1)^0)
-
---~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100"
---~ collectgarbage("collect")
---~ str = string.rep(sample,10000)
---~ local ts = os.clock()
---~ lpegmatch(stripper,str)
---~ print(#str, os.clock()-ts, lpegmatch(stripper,sample))
-
-lpeg.patterns.strip_zeros = stripper
-
-function aux.strip_zeros(str)
- return lpegmatch(stripper,str)
-end
-
-function aux.definetable(target) -- defines undefined tables
- local composed, t = nil, { }
- for name in gmatch(target,"([^%.]+)") do
- if composed then
- composed = composed .. "." .. name
- else
- composed = name
- end
- t[#t+1] = format("%s = %s or { }",composed,composed)
- end
- return concat(t,"\n")
-end
-
-function aux.accesstable(target)
- local t = _G
- for name in gmatch(target,"([^%.]+)") do
- t = t[name]
- end
- return t
-end
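-
--- a sketch of what these produce (the dotted names are just examples)
-
---~ print(aux.definetable("foo.bar.baz"))
---~ -- foo = foo or { }
---~ -- foo.bar = foo.bar or { }
---~ -- foo.bar.baz = foo.bar.baz or { }
---~ print(aux.accesstable("table.concat")) -- the value stored at that dotted path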
-
--- as we use this a lot ...
-
---~ function aux.cachefunction(action,weak)
---~ local cache = { }
---~ if weak then
---~ setmetatable(cache, { __mode = "kv" } )
---~ end
---~ local function reminder(str)
---~ local found = cache[str]
---~ if not found then
---~ found = action(str)
---~ cache[str] = found
---~ end
---~ return found
---~ end
---~ return reminder, cache
---~ end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['trac-tra'] = {
- version = 1.001,
- comment = "companion to trac-tra.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- the <anonymous> tag is kind of generic and used for functions that are not
--- bound to a variable, like node.new, node.copy etc (contrary to for instance
--- node.has_attribute which is bound to a has_attribute local variable in mkiv)
-
-local debug = require "debug"
-
-local getinfo = debug.getinfo
-local type, next = type, next
-local concat = table.concat
-local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub
-
-debugger = debugger or { }
-
-local counters = { }
-local names = { }
-
--- one
-
-local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
- if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
- else
- -- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
- end
- else
- return "unknown"
- end
-end
-function debugger.showstats(printer,threshold)
- printer = printer or texio.write or print
- threshold = threshold or 0
- local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
- end
- grandtotal = grandtotal + count
- functions = functions + 1
- end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
-end
-
--- two
-
---~ local function hook()
---~ local n = getinfo(2)
---~ if n.what=="C" and not n.name then
---~ local f = tostring(debug.traceback())
---~ local cf = counters[f]
---~ if cf == nil then
---~ counters[f] = 1
---~ names[f] = n
---~ else
---~ counters[f] = cf + 1
---~ end
---~ end
---~ end
---~ function debugger.showstats(printer,threshold)
---~ printer = printer or texio.write or print
---~ threshold = threshold or 0
---~ local total, grandtotal, functions = 0, 0, 0
---~ printer("\n") -- ugly but ok
---~ -- table.sort(counters)
---~ for func, count in next, counters do
---~ if count > threshold then
---~ printer(format("%8i %s", count, func))
---~ total = total + count
---~ end
---~ grandtotal = grandtotal + count
---~ functions = functions + 1
---~ end
---~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
---~ end
-
--- rest
-
-function debugger.savestats(filename,threshold)
- local f = io.open(filename,'w')
- if f then
- debugger.showstats(function(str) f:write(str) end,threshold)
- f:close()
- end
-end
-
-function debugger.enable()
- debug.sethook(hook,"c")
-end
-
-function debugger.disable()
- debug.sethook()
---~ counters[debug.getinfo(2,"f").func] = nil
-end
-
-function debugger.tracing()
- local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0
- if n > 0 then
- function debugger.tracing() return true end ; return true
- else
- function debugger.tracing() return false end ; return false
- end
-end
-
---~ debugger.enable()
-
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
-
---~ debugger.disable()
-
---~ print("")
---~ debugger.showstats()
---~ print("")
---~ debugger.showstats(print,3)
-
-setters = setters or { }
-setters.data = setters.data or { }
-
---~ local function set(t,what,value)
---~ local data, done = t.data, t.done
---~ if type(what) == "string" then
---~ what = aux.settings_to_array(what) -- inefficient but ok
---~ end
---~ for i=1,#what do
---~ local w = what[i]
---~ for d, f in next, data do
---~ if done[d] then
---~ -- prevent recursion due to wildcards
---~ elseif find(d,w) then
---~ done[d] = true
---~ for i=1,#f do
---~ f[i](value)
---~ end
---~ end
---~ end
---~ end
---~ end
-
-local function set(t,what,value)
- local data, done = t.data, t.done
- if type(what) == "string" then
- what = aux.settings_to_hash(what) -- inefficient but ok
- end
- for w, v in next, what do
- if v == "" then
- v = value
- else
- v = toboolean(v)
- end
- for d, f in next, data do
- if done[d] then
- -- prevent recursion due to wildcards
- elseif find(d,w) then
- done[d] = true
- for i=1,#f do
- f[i](v)
- end
- end
- end
- end
-end
-
-local function reset(t)
- for d, f in next, t.data do
- for i=1,#f do
- f[i](false)
- end
- end
-end
-
-local function enable(t,what)
- set(t,what,true)
-end
-
-local function disable(t,what)
- local data = t.data
- if not what or what == "" then
- t.done = { }
- reset(t)
- else
- set(t,what,false)
- end
-end
-
-function setters.register(t,what,...)
- local data = t.data
- what = lower(what)
- local w = data[what]
- if not w then
- w = { }
- data[what] = w
- end
- for _, fnc in next, { ... } do
- local typ = type(fnc)
- if typ == "function" then
- w[#w+1] = fnc
- elseif typ == "string" then
- w[#w+1] = function(value) set(t,fnc,value) end
- end
- end
-end
-
-function setters.enable(t,what)
- local e = t.enable
- t.enable, t.done = enable, { }
- enable(t,string.simpleesc(tostring(what)))
- t.enable, t.done = e, { }
-end
-
-function setters.disable(t,what)
- local e = t.disable
- t.disable, t.done = disable, { }
- disable(t,string.simpleesc(tostring(what)))
- t.disable, t.done = e, { }
-end
-
-function setters.reset(t)
- t.done = { }
- reset(t)
-end
-
-function setters.list(t) -- pattern
- local list = table.sortedkeys(t.data)
- local user, system = { }, { }
- for l=1,#list do
- local what = list[l]
- if find(what,"^%*") then
- system[#system+1] = what
- else
- user[#user+1] = what
- end
- end
- return user, system
-end
-
-function setters.show(t)
- commands.writestatus("","")
- local list = setters.list(t)
- for k=1,#list do
- commands.writestatus(t.name,list[k])
- end
- commands.writestatus("","")
-end
-
--- we could have used a bit of oo and the trackers:enable syntax but
--- there is already a lot of code around using the singular tracker
-
--- we could make this into a module
-
-function setters.new(name)
- local t
- t = {
- data = { },
- name = name,
- enable = function(...) setters.enable (t,...) end,
- disable = function(...) setters.disable (t,...) end,
- register = function(...) setters.register(t,...) end,
- list = function(...) return setters.list (t,...) end,
- show = function(...) setters.show (t,...) end,
- }
- setters.data[name] = t
- return t
-end
-
-trackers = setters.new("trackers")
-directives = setters.new("directives")
-experiments = setters.new("experiments")
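-
--- a minimal usage sketch (the tracker name and flag are made up for illustration)
-
---~ local trace_whatever = false
---~ trackers.register("whatever.details", function(v) trace_whatever = v end)
---~ trackers.enable("whatever.details")  -- sets trace_whatever to true
---~ trackers.disable("whatever.details") -- and back to false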
-
--- nice trick: we overload two of the directives related functions with variants that
--- do tracing (itself using a tracker) .. proof of concept
-
-local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end)
-local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end)
-
-local e = directives.enable
-local d = directives.disable
-
-function directives.enable(...)
- commands.writestatus("directives","enabling: %s",concat({...}," "))
- e(...)
-end
-
-function directives.disable(...)
- commands.writestatus("directives","disabling: %s",concat({...}," "))
- d(...)
-end
-
-local e = experiments.enable
-local d = experiments.disable
-
-function experiments.enable(...)
- commands.writestatus("experiments","enabling: %s",concat({...}," "))
- e(...)
-end
-
-function experiments.disable(...)
- commands.writestatus("experiments","disabling: %s",concat({...}," "))
- d(...)
-end
-
--- a useful example
-
-directives.register("system.nostatistics", function(v)
- statistics.enable = not v
-end)
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['luat-env'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- A former version provided functionality for non-embedded core
--- scripts, i.e. runtime library loading. Given the amount of
--- Lua code we use now, this no longer makes sense. Much of this
--- evolved before bytecode arrays were available and so a lot of
--- code has disappeared already.
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquote, quote = string.unquote, string.quote
-
--- precautions
-
-os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
-
-function os.setlocale()
- -- no way you can mess with it
-end
-
--- dirty tricks
-
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
-end
-
-if profiler and os.env["MTX_PROFILE_RUN"] == "YES" then
- profiler.start("luatex-profile.log")
-end
-
--- environment
-
-environment = environment or { }
-environment.arguments = { }
-environment.files = { }
-environment.sortedflags = nil
-
-if not environment.jobname or environment.jobname == "" then if tex then environment.jobname = tex.jobname end end
-if not environment.version or environment.version == "" then environment.version = "unknown" end
-if not environment.jobname then environment.jobname = "unknown" end
-
-function environment.initialize_arguments(arg)
- local arguments, files = { }, { }
- environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
- for index=1,#arg do
- local argument = arg[index]
- if index > 0 then
- local flag, value = match(argument,"^%-+(.-)=(.-)$")
- if flag then
- arguments[flag] = unquote(value or "")
- else
- flag = match(argument,"^%-+(.+)")
- if flag then
- arguments[flag] = true
- else
- files[#files+1] = argument
- end
- end
- end
- end
- environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
-end
-
-function environment.setargument(name,value)
- environment.arguments[name] = value
-end
-
--- todo: defaults, better checks e.g. on type (boolean versus string)
---
--- tricky: we get too many hits when we support partials unless we add
--- a registration of arguments, so from now on we have the 'partial' flag
-
-function environment.argument(name,partial)
- local arguments, sortedflags = environment.arguments, environment.sortedflags
- if arguments[name] then
- return arguments[name]
- elseif partial then
- if not sortedflags then
- sortedflags = table.sortedkeys(arguments)
- for k=1,#sortedflags do
- sortedflags[k] = "^" .. sortedflags[k]
- end
- environment.sortedflags = sortedflags
- end
- -- example of potential clash: ^mode ^modefile
- for k=1,#sortedflags do
- local v = sortedflags[k]
- if find(name,v) then
- return arguments[sub(v,2,#v)]
- end
- end
- end
- return nil
-end
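-
--- sketch: when the user abbreviates a flag, say "--mod=draft" (made up), the
--- partial lookup still resolves it
-
---~ environment.argument("mod")       -- "draft"
---~ environment.argument("mode",true) -- "draft" as well, via the prefix match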
-
-environment.argument("x",true)
-
-function environment.split_arguments(separator) -- rather special, cut-off before separator
- local done, before, after = false, { }, { }
- local original_arguments = environment.original_arguments
- for k=1,#original_arguments do
- local v = original_arguments[k]
- if not done and v == separator then
- done = true
- elseif done then
- after[#after+1] = v
- else
- before[#before+1] = v
- end
- end
- return before, after
-end
-
-function environment.reconstruct_commandline(arg,noquote)
- arg = arg or environment.original_arguments
- if noquote and #arg == 1 then
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquote(a)
- return a
- elseif #arg > 0 then
- local result = { }
- for i=1,#arg do
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquote(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quote(a)
- else
- result[#result+1] = a
- end
- end
- return table.join(result," ")
- else
- return ""
- end
-end
-
-if arg then
-
- -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
- local newarg, instring = { }, false
-
- for index=1,#arg do
- local argument = arg[index]
- if find(argument,"^\"") then
- newarg[#newarg+1] = gsub(argument,"^\"","")
- if not find(argument,"\"$") then
- instring = true
- end
- elseif find(argument,"\"$") then
- newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
- instring = false
- elseif instring then
- newarg[#newarg] = newarg[#newarg] .. " " .. argument
- else
- newarg[#newarg+1] = argument
- end
- end
- for i=0,-5,-1 do -- copy arg[0] and the negative slots; starting at 1 would clobber the reconstructed newarg[1]
- newarg[i] = arg[i]
- end
-
- environment.initialize_arguments(newarg)
- environment.original_arguments = newarg
- environment.raw_arguments = arg
-
- arg = { } -- prevent duplicate handling
-
-end
-
--- weird place ... depends on a not yet loaded module
-
-function environment.texfile(filename)
- return resolvers.find_file(filename,'tex')
-end
-
-function environment.luafile(filename)
- local resolved = resolvers.find_file(filename,'tex') or ""
- if resolved ~= "" then
- return resolved
- end
- resolved = resolvers.find_file(filename,'texmfscripts') or ""
- if resolved ~= "" then
- return resolved
- end
- return resolvers.find_file(filename,'luatexlibs') or ""
-end
-
-environment.loadedluacode = loadfile -- can be overloaded
-
---~ function environment.loadedluacode(name)
---~ if os.spawn("texluac -s -o texluac.luc " .. name) == 0 then
---~ local chunk = loadstring(io.loaddata("texluac.luc"))
---~ os.remove("texluac.luc")
---~ return chunk
---~ else
---~ environment.loadedluacode = loadfile -- can be overloaded
---~ return loadfile(name)
---~ end
---~ end
-
-function environment.luafilechunk(filename) -- used for loading lua bytecode in the format
- filename = file.replacesuffix(filename, "lua")
- local fullname = environment.luafile(filename)
- if fullname and fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading file %s", fullname)
- end
- return environment.loadedluacode(fullname)
- else
- if trace_locating then
- logs.report("fileio","unknown file %s", filename)
- end
- return nil
- end
-end
-
--- the next ones can use the previous ones / combine
-
-function environment.loadluafile(filename, version)
- local lucname, luaname, chunk
- local basename = file.removesuffix(filename)
- if basename == filename then
- lucname, luaname = basename .. ".luc", basename .. ".lua"
- else
- lucname, luaname = nil, basename -- forced suffix
- end
- -- when not overloaded by explicit suffix we look for a luc file first
- local fullname = (lucname and environment.luafile(lucname)) or ""
- if fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- end
- if chunk then
- assert(chunk)()
- if version then
- -- we check whether the version number of this chunk matches
- local v = version -- can be nil
- if modules and modules[filename] then
- v = modules[filename].version -- new method
- elseif versions and versions[filename] then
- v = versions[filename] -- old method
- end
- if v == version then
- return true
- else
- if trace_locating then
- logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version)
- end
- environment.loadluafile(filename)
- end
- else
- return true
- end
- end
- fullname = (luaname and environment.luafile(luaname)) or ""
- if fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- if not chunk then
- if trace_locating then
- logs.report("fileio","unknown file %s", filename)
- end
- else
- assert(chunk)()
- return true
- end
- end
- return false
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['trac-inf'] = {
- version = 1.001,
- comment = "companion to trac-inf.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-
-local statusinfo, n, registered = { }, 0, { }
-
-statistics = statistics or { }
-
-statistics.enable = true
-statistics.threshold = 0.05
-
--- timing functions
-
-local clock = os.gettimeofday or os.clock
-
-local notimer
-
-function statistics.hastimer(instance)
- return instance and instance.starttime
-end
-
-function statistics.resettiming(instance)
- if not instance then
- notimer = { timing = 0, loadtime = 0 }
- else
- instance.timing, instance.loadtime = 0, 0
- end
-end
-
-function statistics.starttiming(instance)
- if not instance then
- notimer = { }
- instance = notimer
- end
- local it = instance.timing
- if not it then
- it = 0
- end
- if it == 0 then
- instance.starttime = clock()
- if not instance.loadtime then
- instance.loadtime = 0
- end
- else
---~ logs.report("system","nested timing (%s)",tostring(instance))
- end
- instance.timing = it + 1
-end
-
-function statistics.stoptiming(instance, report)
- if not instance then
- instance = notimer
- end
- if instance then
- local it = instance.timing
- if it > 1 then
- instance.timing = it - 1
- else
- local starttime = instance.starttime
- if starttime then
- local stoptime = clock()
- local loadtime = stoptime - starttime
- instance.stoptime = stoptime
- instance.loadtime = instance.loadtime + loadtime
- if report then
- statistics.report("load time %0.3f",loadtime)
- end
- instance.timing = 0
- return loadtime
- end
- end
- end
- return 0
-end
-
-function statistics.elapsedtime(instance)
- if not instance then
- instance = notimer
- end
- return format("%0.3f",(instance and instance.loadtime) or 0)
-end
-
-function statistics.elapsedindeed(instance)
- if not instance then
- instance = notimer
- end
- local t = (instance and instance.loadtime) or 0
- return t > statistics.threshold
-end
-
-function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds
- if statistics.elapsedindeed(instance) then
- return format("%s seconds %s", statistics.elapsedtime(instance),rest or "")
- end
-end
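-
--- illustrative sketch, not part of the original code: any table can act as a timer
--- instance; "mytimer" below is just a hypothetical name
-
---~ local mytimer = { }
---~ statistics.starttiming(mytimer)
---~ -- ... some work ...
---~ statistics.stoptiming(mytimer)
---~ print(statistics.elapsedtime(mytimer))           -- e.g. "0.042"
---~ print(statistics.elapsedseconds(mytimer,"used")) -- nil when below the threshold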
-
--- general function
-
-function statistics.register(tag,fnc)
- if statistics.enable and type(fnc) == "function" then
- local rt = registered[tag] or (#statusinfo + 1)
- statusinfo[rt] = { tag, fnc }
- registered[tag] = rt
- if #tag > n then n = #tag end
- end
-end
-
-function statistics.show(reporter)
- if statistics.enable then
- if not reporter then reporter = function(tag,data,n) texio.write_nl(tag .. " " .. data) end end
- -- this code will move
- local register = statistics.register
- register("luatex banner", function()
- return string.lower(status.banner)
- end)
- register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
- end)
- register("callbacks", function()
- local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
- return format("direct: %s, indirect: %s, total: %s", total-indirect, indirect, total)
- end)
- register("current memory usage", statistics.memused)
- register("runtime",statistics.runtime)
--- --
- for i=1,#statusinfo do
- local s = statusinfo[i]
- local r = s[2]()
- if r then
- reporter(s[1],r,n)
- end
- end
- texio.write_nl("") -- final newline
- statistics.enable = false
- end
-end
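-
--- illustrative sketch, not part of the original code: callers register a tag plus a
--- function that produces the value once the statistics are shown
-
---~ statistics.register("example category", function()
---~     return format("%s items",1234)
---~ end)
---~ statistics.show() -- reports all registered lines (texio.write_nl by default)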
-
-function statistics.show_job_stat(tag,data,n)
- texio.write_nl(format("%-15s: %s - %s","mkiv lua stats",tag:rpadd(n," "),data))
-end
-
-function statistics.memused() -- no math.round yet -)
- local round = math.round or math.floor
- return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
-end
-
-if statistics.runtime then
- -- already loaded and set
-elseif luatex and luatex.starttime then
- statistics.starttime = luatex.starttime
- statistics.loadtime = 0
- statistics.timing = 0
-else
- statistics.starttiming(statistics)
-end
-
-function statistics.runtime()
- statistics.stoptiming(statistics)
- return statistics.formatruntime(statistics.elapsedtime(statistics))
-end
-
-function statistics.formatruntime(runtime)
- return format("%s seconds", runtime)
-end
-
-function statistics.timed(action,report)
- local timer = { }
- report = report or logs.simple
- statistics.starttiming(timer)
- action()
- statistics.stoptiming(timer)
- report("total runtime: %s",statistics.elapsedtime(timer))
-end
-
--- where, not really the best spot for this:
-
-commands = commands or { }
-
-local timer
-
-function commands.resettimer()
- statistics.resettiming(timer)
- statistics.starttiming(timer)
-end
-
-function commands.elapsedtime()
- statistics.stoptiming(timer)
- tex.sprint(statistics.elapsedtime(timer))
-end
-
-commands.resettimer()
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['trac-log'] = {
- version = 1.001,
- comment = "companion to trac-log.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this is old code that needs an overhaul
-
---~ io.stdout:setvbuf("no")
---~ io.stderr:setvbuf("no")
-
-local write_nl, write = texio.write_nl or print, texio.write or io.write
-local format, gmatch = string.format, string.gmatch
-local texcount = tex and tex.count
-
-if texlua then
- write_nl = print
- write = io.write
-end
-
---[[ldx--
-<p>This is a prelude to a more extensive logging module. For the sake
-of parsing log files, in addition to the standard logging we will
-provide an <l n='xml'/> structured file. Actually, any logging that
-is hooked into callbacks will be \XML\ by default.</p>
---ldx]]--
-
-logs = logs or { }
-logs.xml = logs.xml or { }
-logs.tex = logs.tex or { }
-
---[[ldx--
-<p>This looks pretty ugly but we need to speed things up a bit.</p>
---ldx]]--
-
-logs.moreinfo = [[
-more information about ConTeXt and the tools that come with it can be found at:
-
-maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
-wiki : http://contextgarden.net
-]]
-
-logs.levels = {
- ['error'] = 1,
- ['warning'] = 2,
- ['info'] = 3,
- ['debug'] = 4,
-}
-
-logs.functions = {
- 'report', 'start', 'stop', 'push', 'pop', 'line', 'direct',
- 'start_run', 'stop_run',
- 'start_page_number', 'stop_page_number',
- 'report_output_pages', 'report_output_log',
- 'report_tex_stat', 'report_job_stat',
- 'show_open', 'show_close', 'show_load',
-}
-
-logs.tracers = {
-}
-
-logs.level = 0
-logs.mode = string.lower((os.getenv("MTX.LOG.MODE") or os.getenv("MTX_LOG_MODE") or "tex"))
-
-function logs.set_level(level)
- logs.level = logs.levels[level] or level
-end
-
-function logs.set_method(method)
- for _, v in next, logs.functions do
- logs[v] = logs[method][v] or function() end
- end
-end
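-
--- illustrative sketch, not part of the original code: set_method copies the handlers
--- of the chosen backend table into the logs namespace, so the same call ends up as
--- plain text or as xml depending on the mode
-
---~ logs.set_method("tex")
---~ logs.report("fonts","loading %s","somefont") -- fonts | loading somefont
---~ logs.set_method("xml")
---~ logs.report("fonts","loading %s","somefont") -- <r category='fonts'>loading somefont</r>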
-
--- tex logging
-
-function logs.tex.report(category,fmt,...) -- new
- if fmt then
- write_nl(category .. " | " .. format(fmt,...))
- else
- write_nl(category .. " |")
- end
-end
-
-function logs.tex.line(fmt,...) -- new
- if fmt then
- write_nl(format(fmt,...))
- else
- write_nl("")
- end
-end
-
---~ function logs.tex.start_page_number()
---~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
---~ if real > 0 then
---~ if user > 0 then
---~ if sub > 0 then
---~ write(format("[%s.%s.%s",real,user,sub))
---~ else
---~ write(format("[%s.%s",real,user))
---~ end
---~ else
---~ write(format("[%s",real))
---~ end
---~ else
---~ write("[-")
---~ end
---~ end
-
---~ function logs.tex.stop_page_number()
---~ write("]")
---~ end
-
-local real, user, sub
-
-function logs.tex.start_page_number()
- real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
-end
-
-function logs.tex.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub)
- else
- logs.report("pages", "flushing realpage %s, userpage %s",real,user)
- end
- else
- logs.report("pages", "flushing realpage %s",real)
- end
- else
- logs.report("pages", "flushing page")
- end
- io.flush()
-end
-
-logs.tex.report_job_stat = statistics.show_job_stat
-
--- xml logging
-
-function logs.xml.report(category,fmt,...) -- new
- if fmt then
- write_nl(format("<r category='%s'>%s</r>",category,format(fmt,...)))
- else
- write_nl(format("<r category='%s'/>",category))
- end
-end
-function logs.xml.line(fmt,...) -- new
- if fmt then
- write_nl(format("<r>%s</r>",format(fmt,...)))
- else
- write_nl("<r/>")
- end
-end
-
-function logs.xml.start() if logs.level > 0 then write_nl("<%s>" ) end end
-function logs.xml.stop () if logs.level > 0 then write_nl("</%s>") end end
-function logs.xml.push () if logs.level > 0 then write_nl("<!-- ") end end
-function logs.xml.pop () if logs.level > 0 then write_nl(" -->" ) end end
-
-function logs.xml.start_run()
- write_nl("<?xml version='1.0' standalone='yes'?>")
- write_nl("<job>") -- xmlns='www.pragma-ade.com/luatex/schemas/context-job.rng'
- write_nl("")
-end
-
-function logs.xml.stop_run()
- write_nl("</job>")
-end
-
-function logs.xml.start_page_number()
- write_nl(format("<p real='%s' page='%s' sub='%s'", texcount.realpageno, texcount.userpageno, texcount.subpageno))
-end
-
-function logs.xml.stop_page_number()
- write("/>")
- write_nl("")
-end
-
-function logs.xml.report_output_pages(p,b)
- write_nl(format("<v k='pages' v='%s'/>", p))
- write_nl(format("<v k='bytes' v='%s'/>", b))
- write_nl("")
-end
-
-function logs.xml.report_output_log()
-end
-
-function logs.xml.report_tex_stat(k,v)
- write_nl("log","<v k='"..k.."'>"..tostring(v).."</v>")
-end
-
-local level = 0
-
-function logs.xml.show_open(name)
- level = level + 1
- write_nl(format("<f l='%s' n='%s'>",level,name))
-end
-
-function logs.xml.show_close(name)
- write("</f> ")
- level = level - 1
-end
-
-function logs.xml.show_load(name)
- write_nl(format("<f l='%s' n='%s'/>",level+1,name))
-end
-
---
-
-local name, banner = 'report', 'context'
-
-local function report(category,fmt,...)
- if fmt then
- write_nl(format("%s | %s: %s",name,category,format(fmt,...)))
- elseif category then
- write_nl(format("%s | %s",name,category))
- else
- write_nl(format("%s |",name))
- end
-end
-
-local function simple(fmt,...)
- if fmt then
- write_nl(format("%s | %s",name,format(fmt,...)))
- else
- write_nl(format("%s |",name))
- end
-end
-
-function logs.setprogram(_name_,_banner_,_verbose_)
- name, banner = _name_, _banner_
- if _verbose_ then
- trackers.enable("resolvers.locating")
- end
- logs.set_method("tex")
- logs.report = report -- also used in libraries
- logs.simple = simple -- only used in scripts !
- if utils then
- utils.report = simple
- end
- logs.verbose = _verbose_
-end
-
-function logs.setverbose(what)
- if what then
- trackers.enable("resolvers.locating")
- else
- trackers.disable("resolvers.locating")
- end
- logs.verbose = what or false
-end
-
-function logs.extendbanner(_banner_,_verbose_)
- banner = banner .. " | ".. _banner_
- if _verbose_ ~= nil then
- logs.setverbose(_verbose_)
- end
-end
-
-logs.verbose = false
-logs.report = logs.tex.report
-logs.simple = logs.tex.report
-
-function logs.reportlines(str) -- todo: <lines></lines>
- for line in gmatch(str,"(.-)[\n\r]") do
- logs.report(line)
- end
-end
-
-function logs.reportline() -- for scripts too
- logs.report()
-end
-
-logs.simpleline = logs.reportline
-
-function logs.reportbanner() -- for scripts too
- logs.report(banner)
-end
-
-function logs.help(message,option)
- logs.reportbanner()
- logs.reportline()
- logs.reportlines(message)
- local moreinfo = logs.moreinfo or ""
- if moreinfo ~= "" and option ~= "nomoreinfo" then
- logs.reportline()
- logs.reportlines(moreinfo)
- end
-end
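-
--- illustrative sketch, not part of the original code: a script typically sets a
--- (hypothetical) name and banner once and then uses the simple reporters
-
---~ logs.setprogram("mtx-example","Example Script 0.1")
---~ logs.simple("processing %s","somefile.tex") -- mtx-example | processing somefile.tex
---~ logs.reportbanner()                         -- mtx-example | Example Script 0.1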
-
-logs.set_level('error')
-logs.set_method('tex')
-
-function logs.system(whereto,process,jobname,category,...)
- for i=1,10 do
- local f = io.open(whereto,"a")
- if f then
- f:write(format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%M:%S"),process,jobname,category,format(...)))
- f:close()
- break
- else
- sleep(0.1)
- end
- end
-end
-
---~ local syslogname = "oeps.xxx"
---~
---~ for i=1,10 do
---~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
---~ end
-
-function logs.fatal(where,...)
- logs.report(where,"fatal error: %s, aborting now",format(...))
- os.exit()
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-inp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- After a few years using the code the large luat-inp.lua file
--- has been split up a bit. In the process some functionality was
--- dropped:
---
--- * support for reading lsr files
--- * selective scanning (subtrees)
--- * some public auxiliary functions were made private
---
--- TODO: os.getenv -> os.env[]
--- TODO: instances.[hashes,cnffiles,configurations,522]
--- TODO: check escaping in find etc, too much, too slow
-
--- This lib is multi-purpose and can be loaded again later on so that
--- additional functionality becomes available. We will split this
--- module in components once we're done with prototyping. This is the
--- first code I wrote for LuaTeX, so it needs some cleanup. Before changing
--- something in this module one can best check with Taco or Hans first; there
--- is some nasty trickery going on that relates to traditional kpse support.
-
--- To be considered: hash key lowercase, first entry in table filename
--- (any case), rest paths (so no need for optimization). Or maybe a
--- separate table that matches lowercase names to mixed case when
--- present. In that case the lower() cases can go away. I will do that
--- only when we run into problems with names ... well ... Iwona-Regular.
-
--- Beware, loading and saving is overloaded in luat-tmp!
-
-local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
-local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
-local next, type = next, type
-local lpegmatch = lpeg.match
-
-local trace_locating, trace_detail, trace_expansions = false, false, false
-
-trackers.register("resolvers.locating", function(v) trace_locating = v end)
-trackers.register("resolvers.details", function(v) trace_detail = v end)
-trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo
-
-if not resolvers then
- resolvers = {
- suffixes = { },
- formats = { },
- dangerous = { },
- suffixmap = { },
- alternatives = { },
- locators = { }, -- locate databases
- hashers = { }, -- load databases
- generators = { }, -- generate databases
- }
-end
-
-local resolvers = resolvers
-
-resolvers.locators .notfound = { nil }
-resolvers.hashers .notfound = { nil }
-resolvers.generators.notfound = { nil }
-
-resolvers.cacheversion = '1.0.1'
-resolvers.cnfname = 'texmf.cnf'
-resolvers.luaname = 'texmfcnf.lua'
-resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~'
-resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}'
-
-local dummy_path_expr = "^!*unset/*$"
-
-local formats = resolvers.formats
-local suffixes = resolvers.suffixes
-local dangerous = resolvers.dangerous
-local suffixmap = resolvers.suffixmap
-local alternatives = resolvers.alternatives
-
-formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' }
-formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' }
-formats['fmt'] = 'TEXFORMATS' suffixes['fmt'] = { 'fmt' }
-formats['map'] = 'TEXFONTMAPS' suffixes['map'] = { 'map' }
-formats['mp'] = 'MPINPUTS' suffixes['mp'] = { 'mp' }
-formats['ocp'] = 'OCPINPUTS' suffixes['ocp'] = { 'ocp' }
-formats['ofm'] = 'OFMFONTS' suffixes['ofm'] = { 'ofm', 'tfm' }
-formats['otf'] = 'OPENTYPEFONTS' suffixes['otf'] = { 'otf' } -- 'ttf'
-formats['opl'] = 'OPLFONTS' suffixes['opl'] = { 'opl' }
-formats['otp'] = 'OTPINPUTS' suffixes['otp'] = { 'otp' }
-formats['ovf'] = 'OVFFONTS' suffixes['ovf'] = { 'ovf', 'vf' }
-formats['ovp'] = 'OVPFONTS' suffixes['ovp'] = { 'ovp' }
-formats['tex'] = 'TEXINPUTS' suffixes['tex'] = { 'tex' }
-formats['tfm'] = 'TFMFONTS' suffixes['tfm'] = { 'tfm' }
-formats['ttf'] = 'TTFONTS' suffixes['ttf'] = { 'ttf', 'ttc', 'dfont' }
-formats['pfb'] = 'T1FONTS' suffixes['pfb'] = { 'pfb', 'pfa' }
-formats['vf'] = 'VFFONTS' suffixes['vf'] = { 'vf' }
-
-formats['fea'] = 'FONTFEATURES' suffixes['fea'] = { 'fea' }
-formats['cid'] = 'FONTCIDMAPS' suffixes['cid'] = { 'cid', 'cidmap' }
-
-formats ['texmfscripts'] = 'TEXMFSCRIPTS' -- new
-suffixes['texmfscripts'] = { 'rb', 'pl', 'py' } -- 'lua'
-
-formats ['lua'] = 'LUAINPUTS' -- new
-suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' }
-
--- backward compatible ones
-
-alternatives['map files'] = 'map'
-alternatives['enc files'] = 'enc'
-alternatives['cid maps'] = 'cid' -- great, why no cid files
-alternatives['font feature files'] = 'fea' -- and fea files here
-alternatives['opentype fonts'] = 'otf'
-alternatives['truetype fonts'] = 'ttf'
-alternatives['truetype collections'] = 'ttc'
-alternatives['truetype dictionary'] = 'dfont'
-alternatives['type1 fonts'] = 'pfb'
-
--- obscure ones
-
-formats ['misc fonts'] = ''
-suffixes['misc fonts'] = { }
-
-formats ['sfd'] = 'SFDFONTS'
-suffixes ['sfd'] = { 'sfd' }
-alternatives['subfont definition files'] = 'sfd'
-
--- lib paths
-
-formats ['lib'] = 'CLUAINPUTS' -- new (needs checking)
-suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' }
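-
--- illustrative sketch, not part of the original code: the tables above map a format
--- name to the variable that is searched and to the suffixes that are tried
-
---~ print(formats['otf'])            -- OPENTYPEFONTS
---~ print(suffixes['tfm'][1])        -- tfm
---~ print(alternatives['map files']) -- map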
-
--- In practice we will work within one tds tree, but i want to keep
--- the option open to build tools that look at multiple trees, which is
--- why we keep the tree specific data in a table. We used to pass the
--- instance but for practical purposes we now avoid this and use an
--- instance variable.
-
--- here we catch a few new thingies (todo: add these paths to context.tmf)
---
--- FONTFEATURES = .;$TEXMF/fonts/fea//
--- FONTCIDMAPS = .;$TEXMF/fonts/cid//
-
--- we always have one instance active
-
-resolvers.instance = resolvers.instance or nil -- the current one (slow access)
-local instance = resolvers.instance or nil -- the current one (fast access)
-
-function resolvers.newinstance()
-
- -- store once, frozen and faster (once reset we can best use
- -- instance.environment); maybe better to have a register suffix
- -- function
-
- for k, v in next, suffixes do
- for i=1,#v do
- local vi = v[i]
- if vi then
- suffixmap[vi] = k
- end
- end
- end
-
- -- because vf searching is somewhat dangerous, we want to prevent
- -- too liberal searching esp because we do a lookup on the current
- -- path anyway; only tex (or any) is safe
-
- for k, v in next, formats do
- dangerous[k] = true
- end
- dangerous.tex = nil
-
- -- the instance
-
- local newinstance = {
- rootpath = '',
- treepath = '',
- progname = 'context',
- engine = 'luatex',
- format = '',
- environment = { },
- variables = { },
- expansions = { },
- files = { },
- remap = { },
- configuration = { },
- setup = { },
- order = { },
- found = { },
- foundintrees = { },
- kpsevars = { },
- hashes = { },
- cnffiles = { },
- luafiles = { },
- lists = { },
- remember = true,
- diskcache = true,
- renewcache = false,
- scandisk = true,
- cachepath = nil,
- loaderror = false,
- sortdata = false,
- savelists = true,
- cleanuppaths = true,
- allresults = false,
- pattern = nil, -- lists
- data = { }, -- only for loading
- force_suffixes = true,
- fakepaths = { },
- }
-
- local ne = newinstance.environment
-
- for k,v in next, os.env do
- ne[k] = resolvers.bare_variable(v)
- end
-
- return newinstance
-
-end
-
-function resolvers.setinstance(someinstance)
- instance = someinstance
- resolvers.instance = someinstance
- return someinstance
-end
-
-function resolvers.reset()
- return resolvers.setinstance(resolvers.newinstance())
-end
-
-local function reset_hashes()
- instance.lists = { }
- instance.found = { }
-end
-
-local function check_configuration() -- not yet ok, no time for debugging now
- local ie, iv = instance.environment, instance.variables
- local function fix(varname,default)
- local proname = varname .. "." .. instance.progname or "crap"
- local p, v = ie[proname], ie[varname] or iv[varname]
- if not ((p and p ~= "") or (v and v ~= "")) then
- iv[varname] = default -- or environment?
- end
- end
- local name = os.name
- if name == "windows" then
- fix("OSFONTDIR", "c:/windows/fonts//")
- elseif name == "macosx" then
- fix("OSFONTDIR", "$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
- else
- -- bad luck
- end
- fix("LUAINPUTS" , ".;$TEXINPUTS;$TEXMFSCRIPTS") -- no progname, hm
- -- this will go away some day
- fix("FONTFEATURES", ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
- fix("FONTCIDMAPS" , ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
- --
- fix("LUATEXLIBS" , ".;$TEXMF/luatex/lua//")
-end
-
-function resolvers.bare_variable(str) -- assumes str is a string
- return (gsub(str,"%s*([\"\']?)(.+)%1%s*", "%2"))
-end
-
-function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
- if n then
- trackers.disable("resolvers.*")
- trackers.enable("resolvers."..n)
- end
-end
-
-resolvers.settrace(os.getenv("MTX_INPUT_TRACE"))
-
-function resolvers.osenv(key)
- local ie = instance.environment
- local value = ie[key]
- if value == nil then
- -- local e = os.getenv(key)
- local e = os.env[key]
- if e == nil then
- -- value = "" -- false
- else
- value = resolvers.bare_variable(e)
- end
- ie[key] = value
- end
- return value or ""
-end
-
-function resolvers.env(key)
- return instance.environment[key] or resolvers.osenv(key)
-end
-
---
-
-local function expand_vars(lst) -- simple vars
- local variables, env = instance.variables, resolvers.env
- local function resolve(a)
- return variables[a] or env(a)
- end
- for k=1,#lst do
- lst[k] = gsub(lst[k],"%$([%a%d%_%-]+)",resolve)
- end
-end
-
-local function expanded_var(var) -- simple vars
- local function resolve(a)
- return instance.variables[a] or resolvers.env(a)
- end
- return (gsub(var,"%$([%a%d%_%-]+)",resolve))
-end
-
-local function entry(entries,name)
- if name and (name ~= "") then
- name = gsub(name,'%$','')
- local result = entries[name..'.'..instance.progname] or entries[name]
- if result then
- return result
- else
- result = resolvers.env(name)
- if result then
- instance.variables[name] = result
- resolvers.expand_variables()
- return instance.expansions[name] or ""
- end
- end
- end
- return ""
-end
-
-local function is_entry(entries,name)
- if name and name ~= "" then
- name = gsub(name,'%$','')
- return (entries[name..'.'..instance.progname] or entries[name]) ~= nil
- else
- return false
- end
-end
-
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
--- this one is better and faster, but it took me a while to realize
--- that this kind of replacement is cleaner than messy parsing and
--- fuzzy concatenating; we can probably gain a bit by selectively
--- applying lpeg, but experiments with lpeg parsing this proved not to
--- work that well; the parsing is ok, but dealing with the resulting
--- table is a pain because we need to work inside-out recursively
-
-local function do_first(a,b)
- local t = { }
- for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_second(a,b)
- local t = { }
- for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_both(a,b)
- local t = { }
- for sa in gmatch(a,"[^,]+") do
- for sb in gmatch(b,"[^,]+") do
- t[#t+1] = sa .. sb
- end
- end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_three(a,b,c)
- return a .. b.. c
-end
-
-local function splitpathexpr(str, t, validate)
- -- no need for further optimization as it is only called a
- -- few times, we can use lpeg for the sub
- if trace_expansions then
- logs.report("fileio","expanding variable '%s'",str)
- end
- t = t or { }
- str = gsub(str,",}",",@}")
- str = gsub(str,"{,","{@,")
- -- str = "@" .. str .. "@"
- local ok, done
- while true do
- done = false
- while true do
- str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
- if ok > 0 then done = true else break end
- end
- str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
- if ok > 0 then done = true end
- if not done then break end
- end
- str = gsub(str,"[{}]", "")
- str = gsub(str,"@","")
- if validate then
- for s in gmatch(str,"[^,]+") do
- s = validate(s)
- if s then t[#t+1] = s end
- end
- else
- for s in gmatch(str,"[^,]+") do
- t[#t+1] = s
- end
- end
- if trace_expansions then
- for k=1,#t do
- logs.report("fileio","% 4i: %s",k,t[k])
- end
- end
- return t
-end
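-
--- illustrative sketch, not part of the original code: a (hypothetical) call of the
--- local helper above expands braces by repeated replacement, so
-
---~ splitpathexpr("a,b/{p,q},c",{ })
-
--- returns { "a", "b/p", "b/q", "c" }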
-
-local function expanded_path_from_list(pathlist) -- maybe not a list, just a path
- -- a previous version fed back into pathlist
- local newlist, ok = { }, false
- for k=1,#pathlist do
- if find(pathlist[k],"[{}]") then
- ok = true
- break
- end
- end
- if ok then
- local function validate(s)
- s = file.collapse_path(s)
- return s ~= "" and not find(s,dummy_path_expr) and s
- end
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- else
- for k=1,#pathlist do
- for p in gmatch(pathlist[k],"([^,]+)") do
- p = file.collapse_path(p)
- if p ~= "" then newlist[#newlist+1] = p end
- end
- end
- end
- return newlist
-end
-
--- we follow a rather traditional approach:
---
--- (1) texmf.cnf given in TEXMFCNF
--- (2) texmf.cnf searched in default variable
---
--- also we now follow the stupid route: if not set then just assume *one*
--- cnf file under texmf (i.e. distribution)
-
-local args = environment and environment.original_arguments or arg -- this needs a cleanup
-
-resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
-resolvers.ownbin = gsub(resolvers.ownbin,"\\","/")
-
-function resolvers.getownpath()
- local ownpath = resolvers.ownpath or os.selfdir
- if not ownpath or ownpath == "" or ownpath == "unset" then
- ownpath = args[-1] or arg[-1]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- if not ownpath or ownpath == "" then
- ownpath = args[-0] or arg[-0]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- end
- local binary = resolvers.ownbin
- if not ownpath or ownpath == "" then
- ownpath = ownpath and file.dirname(binary)
- end
- if not ownpath or ownpath == "" then
- if os.binsuffix ~= "" then
- binary = file.replacesuffix(binary,os.binsuffix)
- end
- for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
- local b = file.join(p,binary)
- if lfs.isfile(b) then
- -- we assume that after changing to the path the currentdir function
- -- resolves to the real location and use this side effect here; this
- -- trick is needed because on the mac installations use symlinks in the
- -- path instead of real locations
- local olddir = lfs.currentdir()
- if lfs.chdir(p) then
- local pp = lfs.currentdir()
- if trace_locating and p ~= pp then
- logs.report("fileio","following symlink '%s' to '%s'",p,pp)
- end
- ownpath = pp
- lfs.chdir(olddir)
- else
- if trace_locating then
- logs.report("fileio","unable to check path '%s'",p)
- end
- ownpath = p
- end
- break
- end
- end
- end
- if not ownpath or ownpath == "" then
- ownpath = "."
- logs.report("fileio","forcing fallback ownpath .")
- elseif trace_locating then
- logs.report("fileio","using ownpath '%s'",ownpath)
- end
- end
- resolvers.ownpath = ownpath
- function resolvers.getownpath()
- return resolvers.ownpath
- end
- return ownpath
-end
-
-local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" }
-
-local function identify_own()
- local ownpath = resolvers.getownpath() or dir.current()
- local ie = instance.environment
- if ownpath then
- if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end
- if resolvers.env('SELFAUTODIR') == "" then os.env['SELFAUTODIR'] = file.collapse_path(ownpath .. "/..") end
- if resolvers.env('SELFAUTOPARENT') == "" then os.env['SELFAUTOPARENT'] = file.collapse_path(ownpath .. "/../..") end
- else
- logs.report("fileio","error: unable to locate ownpath")
- os.exit()
- end
- if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end
- if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end
- if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end
- if trace_locating then
- for i=1,#own_places do
- local v = own_places[i]
- logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown")
- end
- end
- identify_own = function() end
-end
-
-function resolvers.identify_cnf()
- if #instance.cnffiles == 0 then
- -- fallback
- identify_own()
- -- the real search
- resolvers.expand_variables()
- local t = resolvers.split_path(resolvers.env('TEXMFCNF'))
- t = expanded_path_from_list(t)
- expand_vars(t) -- redundant
- local function locate(filename,list)
- for i=1,#t do
- local ti = t[i]
- local texmfcnf = file.collapse_path(file.join(ti,filename))
- if lfs.isfile(texmfcnf) then
- list[#list+1] = texmfcnf
- end
- end
- end
- locate(resolvers.luaname,instance.luafiles)
- locate(resolvers.cnfname,instance.cnffiles)
- end
-end
-
-local function load_cnf_file(fname)
- fname = resolvers.clean_path(fname)
- local lname = file.replacesuffix(fname,'lua')
- if lfs.isfile(lname) then
- local dname = file.dirname(fname) -- fname ?
- if not instance.configuration[dname] then
- resolvers.load_data(dname,'configuration',lname and file.basename(lname))
- instance.order[#instance.order+1] = instance.configuration[dname]
- end
- else
- local f = io.open(fname)
- if f then
- if trace_locating then
- logs.report("fileio","loading configuration file %s", fname)
- end
- local line, data, n, k, v
- local dname = file.dirname(fname)
- if not instance.configuration[dname] then
- instance.configuration[dname] = { }
- instance.order[#instance.order+1] = instance.configuration[dname]
- end
- local data = instance.configuration[dname]
- while true do
- local line, n = f:read(), 0
- if line then
- while true do -- join lines
- line, n = gsub(line,"\\%s*$", "")
- if n > 0 then
- line = line .. f:read()
- else
- break
- end
- end
- if not find(line,"^[%%#]") then
- local l = gsub(line,"%s*%%.*$","")
- local k, v = match(l,"%s*(.-)%s*=%s*(.-)%s*$")
- if k and v and not data[k] then
- v = gsub(v,"[%%#].*",'')
- data[k] = gsub(v,"~","$HOME")
- instance.kpsevars[k] = true
- end
- end
- else
- break
- end
- end
- f:close()
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'", fname)
- end
- end
-end
-
-local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared)
- local order = instance.order
- for i=1,#order do
- local c = order[i]
- for k,v in next, c do
- if not instance.variables[k] then
- if instance.environment[k] then
- instance.variables[k] = instance.environment[k]
- else
- instance.kpsevars[k] = true
- instance.variables[k] = resolvers.bare_variable(v)
- end
- end
- end
- end
-end
-
-function resolvers.load_cnf()
- local function loadoldconfigdata()
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- load_cnf_file(cnffiles[i])
- end
- end
- -- instance.cnffiles contain complete names now !
- -- we still use a funny mix of cnf and new but soon
- -- we will switch to lua exclusively as we only use
- -- the file to collect the tree roots
- if #instance.cnffiles == 0 then
- if trace_locating then
- logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)")
- end
- else
- local cnffiles = instance.cnffiles
- instance.rootpath = cnffiles[1]
- for k=1,#cnffiles do
- instance.cnffiles[k] = file.collapse_path(cnffiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- if instance.diskcache and not instance.renewcache then
- resolvers.loadoldconfig(instance.cnffiles)
- if instance.loaderror then
- loadoldconfigdata()
- resolvers.saveoldconfig()
- end
- else
- loadoldconfigdata()
- if instance.renewcache then
- resolvers.saveoldconfig()
- end
- end
- collapse_cnf_data()
- end
- check_configuration()
-end
-
-function resolvers.load_lua()
- if #instance.luafiles == 0 then
- -- yet harmless
- else
- instance.rootpath = instance.luafiles[1]
- local luafiles = instance.luafiles
- for k=1,#luafiles do
- instance.luafiles[k] = file.collapse_path(luafiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- resolvers.loadnewconfig()
- collapse_cnf_data()
- end
- check_configuration()
-end
-
--- database loading
-
-function resolvers.load_hash()
- resolvers.locatelists()
- if instance.diskcache and not instance.renewcache then
- resolvers.loadfiles()
- if instance.loaderror then
- resolvers.loadlists()
- resolvers.savefiles()
- end
- else
- resolvers.loadlists()
- if instance.renewcache then
- resolvers.savefiles()
- end
- end
-end
-
-function resolvers.append_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' appended",tag)
- end
- insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } )
-end
-
-function resolvers.prepend_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' prepended",tag)
- end
- insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } )
-end
-
-function resolvers.extend_texmf_var(specification) -- crap, we could better prepend the hash
--- local t = resolvers.expanded_path_list('TEXMF') -- full expansion
- local t = resolvers.split_path(resolvers.env('TEXMF'))
- insert(t,1,specification)
- local newspec = concat(t,";")
- if instance.environment["TEXMF"] then
- instance.environment["TEXMF"] = newspec
- elseif instance.variables["TEXMF"] then
- instance.variables["TEXMF"] = newspec
- else
- -- weird
- end
- resolvers.expand_variables()
- reset_hashes()
-end
-
--- locators
-
-function resolvers.locatelists()
- local texmfpaths = resolvers.clean_path_list('TEXMF')
- for i=1,#texmfpaths do
- local path = texmfpaths[i]
- if trace_locating then
- logs.report("fileio","locating list of '%s'",path)
- end
- resolvers.locatedatabase(file.collapse_path(path))
- end
-end
-
-function resolvers.locatedatabase(specification)
- return resolvers.methodhandler('locators', specification)
-end
-
-function resolvers.locators.tex(specification)
- if specification and specification ~= '' and lfs.isdir(specification) then
- if trace_locating then
- logs.report("fileio","tex locator '%s' found",specification)
- end
- resolvers.append_hash('file',specification,filename)
- elseif trace_locating then
- logs.report("fileio","tex locator '%s' not found",specification)
- end
-end
-
--- hashers
-
-function resolvers.hashdatabase(tag,name)
- return resolvers.methodhandler('hashers',tag,name)
-end
-
-function resolvers.loadfiles()
- instance.loaderror = false
- instance.files = { }
- if not instance.renewcache then
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- resolvers.hashdatabase(hash.tag,hash.name)
- if instance.loaderror then break end
- end
- end
-end
-
-function resolvers.hashers.tex(tag,name)
- resolvers.load_data(tag,'files')
-end
-
--- generators:
-
-function resolvers.loadlists()
- local hashes = instance.hashes
- for i=1,#hashes do
- resolvers.generatedatabase(hashes[i].tag)
- end
-end
-
-function resolvers.generatedatabase(specification)
- return resolvers.methodhandler('generators', specification)
-end
-
--- starting with . or .. etc or funny char
-
-local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-
---~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
---~ local l_confusing = lpeg.P(" ")
---~ local l_character = lpeg.patterns.utf8
---~ local l_dangerous = lpeg.P(".")
-
---~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1)
---~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false)
-
---~ local function test(str)
---~ print(str,lpeg.match(l_normal,str))
---~ end
---~ test("ヒラギノ明朝 Pro W3")
---~ test("..ヒラギノ明朝 Pro W3")
---~ test(":ヒラギノ明朝 Pro W3;")
---~ test("ヒラギノ明朝 /Pro W3;")
---~ test("ヒラギノ明朝 Pro W3")
-
-function resolvers.generators.tex(specification)
- local tag = specification
- if trace_locating then
- logs.report("fileio","scanning path '%s'",specification)
- end
- instance.files[tag] = { }
- local files = instance.files[tag]
- local n, m, r = 0, 0, 0
- local spec = specification .. '/'
- local attributes = lfs.attributes
- local directory = lfs.dir
- local function action(path)
- local full
- if path then
- full = spec .. path .. '/'
- else
- full = spec
- end
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- -- if lpegmatch(l_normal,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- if path then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
- end
- elseif mode == 'directory' then
- m = m + 1
- if path then
- action(path..'/'..name)
- else
- action(name)
- end
- end
- end
- end
- end
- action()
- if trace_locating then
- logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r)
- end
-end
-
--- savers, todo
-
-function resolvers.savefiles()
- resolvers.save_data('files')
-end
-
--- A config (optionally) has the paths split in tables. Internally
--- we join them and split them after the expansion has taken place. This
--- is more convenient.
-
---~ local checkedsplit = string.checkedsplit
-
-local cache = { }
-
-local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;")))
-
-local function split_kpse_path(str) -- beware, this can be either a path or a {specification}
- local found = cache[str]
- if not found then
- if str == "" then
- found = { }
- else
- str = gsub(str,"\\","/")
---~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator)
-local split = lpegmatch(splitter,str)
- found = { }
- for i=1,#split do
- local s = split[i]
- if not find(s,"^{*unset}*") then
- found[#found+1] = s
- end
- end
- if trace_expansions then
- logs.report("fileio","splitting path specification '%s'",str)
- for k=1,#found do
- logs.report("fileio","% 4i: %s",k,found[k])
- end
- end
- cache[str] = found
- end
- end
- return found
-end
-
-resolvers.split_kpse_path = split_kpse_path
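-
--- illustrative sketch, not part of the original code: a kpse style specification is
--- split on the platform separator(s); backslashes are normalized and results cached
-
---~ resolvers.split_kpse_path(".;$TEXMF/tex/context//;$TEXMF/tex/generic//")
-
--- returns { ".", "$TEXMF/tex/context//", "$TEXMF/tex/generic//" }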
-
-function resolvers.splitconfig()
- for i=1,#instance do
- local c = instance[i]
- for k,v in next, c do
- if type(v) == 'string' then
- local t = split_kpse_path(v)
- if #t > 1 then
- c[k] = t
- end
- end
- end
- end
-end
-
-function resolvers.joinconfig()
- local order = instance.order
- for i=1,#order do
- local c = order[i]
- for k,v in next, c do -- indexed?
- if type(v) == 'table' then
- c[k] = file.join_path(v)
- end
- end
- end
-end
-
-function resolvers.split_path(str)
- if type(str) == 'table' then
- return str
- else
- return split_kpse_path(str)
- end
-end
-
-function resolvers.join_path(str)
- if type(str) == 'table' then
- return file.join_path(str)
- else
- return str
- end
-end
-
-function resolvers.splitexpansions()
- local ie = instance.expansions
- for k,v in next, ie do
- local t, h, p = { }, { }, split_kpse_path(v)
- for kk=1,#p do
- local vv = p[kk]
- if vv ~= "" and not h[vv] then
- t[#t+1] = vv
- h[vv] = true
- end
- end
- if #t > 1 then
- ie[k] = t
- else
- ie[k] = t[1]
- end
- end
-end
-
--- end of split/join code
-
-function resolvers.saveoldconfig()
- resolvers.splitconfig()
- resolvers.save_data('configuration')
- resolvers.joinconfig()
-end
-
-resolvers.configbanner = [[
--- This is a Luatex configuration file created by 'luatools.lua' or
--- 'luatex.exe' directly. For comment, suggestions and questions you can
--- contact the ConTeXt Development Team. This configuration file is
--- not copyrighted. [HH & TH]
-]]
-
-function resolvers.serialize(files)
- -- This version is somewhat optimized for the kind of
- -- tables that we deal with, so it's much faster than
- -- the generic serializer. This makes sense because
- -- luatools and mtxtools are called frequently. Okay,
- -- we pay a small price for properly tabbed tables.
- local t = { }
- local function dump(k,v,m) -- could be moved inline
- if type(v) == 'string' then
- return m .. "['" .. k .. "']='" .. v .. "',"
- elseif #v == 1 then
- return m .. "['" .. k .. "']='" .. v[1] .. "',"
- else
- return m .. "['" .. k .. "']={'" .. concat(v,"','").. "'},"
- end
- end
- t[#t+1] = "return {"
- if instance.sortdata then
- local sortedfiles = sortedkeys(files)
- for i=1,#sortedfiles do
- local k = sortedfiles[i]
- local fk = files[k]
- if type(fk) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- local sortedfk = sortedkeys(fk)
- for j=1,#sortedfk do
- local kk = sortedfk[j]
- t[#t+1] = dump(kk,fk[kk],"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,fk,"\t")
- end
- end
- else
- for k, v in next, files do
- if type(v) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- for kk,vv in next, v do
- t[#t+1] = dump(kk,vv,"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,v,"\t")
- end
- end
- end
- t[#t+1] = "}"
- return concat(t,"\n")
-end
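-
--- illustrative sketch, not part of the original code: assuming an active instance
--- (sortdata is off by default) the serializer only deals with strings and flat
--- string arrays, so a (hypothetical) call like
-
---~ print(resolvers.serialize { content = { ["context.mkiv"] = "tex/context/base" } })
-
--- prints (tabs shown as spaces):
---
--- return {
---   ['content']={
---     ['context.mkiv']='tex/context/base',
---   },
--- }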
-
-local data_state = { }
-
-function resolvers.data_state()
- return data_state or { }
-end
-
-function resolvers.save_data(dataname, makename) -- untested without cache overload
- for cachename, files in next, instance[dataname] do
- local name = (makename or file.join)(cachename,dataname)
- local luaname, lucname = name .. ".lua", name .. ".luc"
- if trace_locating then
- logs.report("fileio","preparing '%s' for '%s'",dataname,cachename)
- end
- for k, v in next, files do
- if type(v) == "table" and #v == 1 then
- files[k] = v[1]
- end
- end
- local data = {
- type = dataname,
- root = cachename,
- version = resolvers.cacheversion,
- date = os.date("%Y-%m-%d"),
- time = os.date("%H:%M:%S"),
- content = files,
- uuid = os.uuid(),
- }
- local ok = io.savedata(luaname,resolvers.serialize(data))
- if ok then
- if trace_locating then
- logs.report("fileio","'%s' saved in '%s'",dataname,luaname)
- end
- if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip
- if trace_locating then
- logs.report("fileio","'%s' compiled to '%s'",dataname,lucname)
- end
- else
- if trace_locating then
- logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname)
- end
- os.remove(lucname)
- end
- elseif trace_locating then
- logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname)
- end
- end
-end
-
-function resolvers.load_data(pathname,dataname,filename,makename) -- untested without cache overload
- filename = ((not filename or (filename == "")) and dataname) or filename
- filename = (makename and makename(dataname,filename)) or file.join(pathname,filename)
- local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
- if blob then
- local data = blob()
- if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then
- data_state[#data_state+1] = data.uuid
- if trace_locating then
- logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = data.content
- else
- if trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
-end
-
--- some day i'll use the nested approach, but not yet (actually we even drop
--- engine/progname support since we have only luatex now)
---
--- first texmfcnf.lua files are located, next the cached texmf.cnf files
---
--- return {
--- TEXMFBOGUS = 'just checking whether this works',
--- }
-
-function resolvers.resetconfig()
- identify_own()
- instance.configuration, instance.setup, instance.order, instance.loaderror = { }, { }, { }, false
-end
-
-function resolvers.loadnewconfig()
- local luafiles = instance.luafiles
- for i=1,#luafiles do
- local cnf = luafiles[i]
- local pathname = file.dirname(cnf)
- local filename = file.join(pathname,resolvers.luaname)
- local blob = loadfile(filename)
- if blob then
- local data = blob()
- if data then
- if trace_locating then
- logs.report("fileio","loading configuration file '%s'",filename)
- end
- if true then
- -- flatten to variable.progname
- local t = { }
- for k, v in next, data do -- v = progname
- if type(v) == "string" then
- t[k] = v
- else
- for kk, vv in next, v do -- vv = variable
- if type(vv) == "string" then
- t[vv.."."..v] = kk
- end
- end
- end
- end
- instance['setup'][pathname] = t
- else
- instance['setup'][pathname] = data
- end
- else
- if trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance['setup'][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance.order[#instance.order+1] = instance.setup[pathname]
- if instance.loaderror then break end
- end
-end
-
-function resolvers.loadoldconfig()
- if not instance.renewcache then
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- local cnf = cnffiles[i]
- local dname = file.dirname(cnf)
- resolvers.load_data(dname,'configuration')
- instance.order[#instance.order+1] = instance.configuration[dname]
- if instance.loaderror then break end
- end
- end
- resolvers.joinconfig()
-end
-
-function resolvers.expand_variables()
- local expansions, environment, variables = { }, instance.environment, instance.variables
- local env = resolvers.env
- instance.expansions = expansions
- if instance.engine ~= "" then environment['engine'] = instance.engine end
- if instance.progname ~= "" then environment['progname'] = instance.progname end
- for k,v in next, environment do
- local a, b = match(k,"^(%a+)%_(.*)%s*$")
- if a and b then
- expansions[a..'.'..b] = v
- else
- expansions[k] = v
- end
- end
- for k,v in next, environment do -- move environment to expansions
- if not expansions[k] then expansions[k] = v end
- end
- for k,v in next, variables do -- move variables to expansions
- if not expansions[k] then expansions[k] = v end
- end
- local busy = false
- local function resolve(a)
- busy = true
- return expansions[a] or env(a)
- end
- while true do
- busy = false
- for k,v in next, expansions do
- local s, n = gsub(v,"%$([%a%d%_%-]+)",resolve)
- local s, m = gsub(s,"%$%{([%a%d%_%-]+)%}",resolve)
- if n > 0 or m > 0 then
- expansions[k]= s
- end
- end
- if not busy then break end
- end
- for k,v in next, expansions do
- expansions[k] = gsub(v,"\\", '/')
- end
-end
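-
--- illustrative sketch, not part of the original code: given an active instance and
--- some (hypothetical) values, expansion resolves $NAME and ${NAME} recursively
-
---~ instance.variables["TEXMFCNF"] = "/opt/tex/texmf/web2c"
---~ instance.variables["TEXMF"]    = "$TEXMFCNF/.."
---~ resolvers.expand_variables()
---~ print(resolvers.expansion("TEXMF")) -- /opt/tex/texmf/web2c/..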
-
-function resolvers.variable(name)
- return entry(instance.variables,name)
-end
-
-function resolvers.expansion(name)
- return entry(instance.expansions,name)
-end
-
-function resolvers.is_variable(name)
- return is_entry(instance.variables,name)
-end
-
-function resolvers.is_expansion(name)
- return is_entry(instance.expansions,name)
-end
-
-function resolvers.unexpanded_path_list(str)
- local pth = resolvers.variable(str)
- local lst = resolvers.split_path(pth)
- return expanded_path_from_list(lst)
-end
-
-function resolvers.unexpanded_path(str)
- return file.join_path(resolvers.unexpanded_path_list(str))
-end
-
-do -- no longer needed
-
- local done = { }
-
- function resolvers.reset_extra_path()
- local ep = instance.extra_paths
- if not ep then
- ep, done = { }, { }
- instance.extra_paths = ep
- elseif #ep > 0 then
- instance.lists, done = { }, { }
- end
- end
-
- function resolvers.register_extra_path(paths,subpaths)
- local ep = instance.extra_paths or { }
- local n = #ep
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = p .. "/" .. s
- if not done[ps] then
- ep[#ep+1] = resolvers.clean_path(ps)
- done[ps] = true
- end
- end
- end
- else
- for p in gmatch(paths,"[^,]+") do
- if not done[p] then
- ep[#ep+1] = resolvers.clean_path(p)
- done[p] = true
- end
- end
- end
- elseif subpaths and subpaths ~= "" then
- for i=1,n do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = ep[i] .. "/" .. s
- if not done[ps] then
- ep[#ep+1] = resolvers.clean_path(ps)
- done[ps] = true
- end
- end
- end
- end
- if #ep > 0 then
- instance.extra_paths = ep -- register paths
- end
- if #ep > n then
- instance.lists = { } -- erase the cache
- end
- end
-
-end
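-
--- illustrative sketch, not part of the original code: registered (hypothetical) extra
--- paths are combined with the given subpaths and, via made_list below, end up before
--- the formal paths
-
---~ resolvers.register_extra_path("/data/project","styles,fonts")
-
--- adds "/data/project/styles" and "/data/project/fonts" to instance.extra_paths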
-
-local function made_list(instance,list)
- local ep = instance.extra_paths
- if not ep or #ep == 0 then
- return list
- else
- local done, new = { }, { }
- -- honour . .. ../.. but only when at the start
- for k=1,#list do
- local v = list[k]
- if not done[v] then
- if find(v,"^[%.%/]$") then
- done[v] = true
- new[#new+1] = v
- else
- break
- end
- end
- end
- -- first the extra paths
- for k=1,#ep do
- local v = ep[k]
- if not done[v] then
- done[v] = true
- new[#new+1] = v
- end
- end
- -- next the formal paths
- for k=1,#list do
- local v = list[k]
- if not done[v] then
- done[v] = true
- new[#new+1] = v
- end
- end
- return new
- end
-end
-
-function resolvers.clean_path_list(str)
- local t = resolvers.expanded_path_list(str)
- if t then
- for i=1,#t do
- t[i] = file.collapse_path(resolvers.clean_path(t[i]))
- end
- end
- return t
-end
-
-function resolvers.expand_path(str)
- return file.join_path(resolvers.expanded_path_list(str))
-end
-
-function resolvers.expanded_path_list(str)
- if not str then
- return ep or { } -- ep ?
- elseif instance.savelists then
- -- engine+progname hash
- str = gsub(str,"%$","")
- if not instance.lists[str] then -- cached
- local lst = made_list(instance,resolvers.split_path(resolvers.expansion(str)))
- instance.lists[str] = expanded_path_from_list(lst)
- end
- return instance.lists[str]
- else
- local lst = resolvers.split_path(resolvers.expansion(str))
- return made_list(instance,expanded_path_from_list(lst))
- end
-end
-
-function resolvers.expanded_path_list_from_var(str) -- brrr
- local tmp = resolvers.var_of_format_or_suffix(gsub(str,"%$",""))
- if tmp ~= "" then
- return resolvers.expanded_path_list(tmp)
- else
- return resolvers.expanded_path_list(str)
- end
-end
-
-function resolvers.expand_path_from_var(str)
- return file.join_path(resolvers.expanded_path_list_from_var(str))
-end
-
-function resolvers.format_of_var(str)
- return formats[str] or formats[alternatives[str]] or ''
-end
-function resolvers.format_of_suffix(str)
- return suffixmap[file.extname(str)] or 'tex'
-end
-
-function resolvers.variable_of_format(str)
- return formats[str] or formats[alternatives[str]] or ''
-end
-
-function resolvers.var_of_format_or_suffix(str)
- local v = formats[str]
- if v then
- return v
- end
- v = formats[alternatives[str]]
- if v then
- return v
- end
- v = suffixmap[file.extname(str)]
- if v then
- return formats[v]
- end
- return ''
-end
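-
--- illustrative sketch, not part of the original code: once an instance is created
--- (so that the suffix map is filled) the lookups above go both ways
-
---~ print(resolvers.format_of_suffix("whatever.otf"))        -- otf
---~ print(resolvers.variable_of_format("otf"))               -- OPENTYPEFONTS
---~ print(resolvers.var_of_format_or_suffix("whatever.otf")) -- OPENTYPEFONTS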
-
-function resolvers.expand_braces(str) -- output variable and brace expansion of STRING
- local ori = resolvers.variable(str)
- local pth = expanded_path_from_list(resolvers.split_path(ori))
- return file.join_path(pth)
-end
-
-resolvers.isreadable = { }
-
-function resolvers.isreadable.file(name)
- local readable = lfs.isfile(name) -- brrr
- if trace_detail then
- if readable then
- logs.report("fileio","file '%s' is readable",name)
- else
- logs.report("fileio","file '%s' is not readable", name)
- end
- end
- return readable
-end
-
-resolvers.isreadable.tex = resolvers.isreadable.file
-
--- name
--- name/name
-
-local function collect_files(names)
- local filelist = { }
- for k=1,#names do
- local fname = names[k]
- if trace_detail then
- logs.report("fileio","checking name '%s'",fname)
- end
- local bname = file.basename(fname)
- local dname = file.dirname(fname)
- if dname == "" or find(dname,"^%.") then
- dname = false
- else
- dname = "/" .. dname .. "$"
- end
- local hashes = instance.hashes
- for h=1,#hashes do
- local hash = hashes[h]
- local blobpath = hash.tag
- local files = blobpath and instance.files[blobpath]
- if files then
- if trace_detail then
- logs.report("fileio","deep checking '%s' (%s)",blobpath,bname)
- end
- local blobfile = files[bname]
- if not blobfile then
- local rname = "remap:"..bname
- blobfile = files[rname]
- if blobfile then
- bname = files[rname]
- blobfile = files[bname]
- end
- end
- if blobfile then
- if type(blobfile) == 'string' then
- if not dname or find(blobfile,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,blobfile,bname), -- search
- resolvers.concatinators[hash.type](blobpath,blobfile,bname) -- result
- }
- end
- else
- for kk=1,#blobfile do
- local vv = blobfile[kk]
- if not dname or find(vv,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,vv,bname), -- search
- resolvers.concatinators[hash.type](blobpath,vv,bname) -- result
- }
- end
- end
- end
- end
- elseif trace_locating then
- logs.report("fileio","no match in '%s' (%s)",blobpath,bname)
- end
- end
- end
- if #filelist > 0 then
- return filelist
- else
- return nil
- end
-end
-
-function resolvers.suffix_of_format(str)
- if suffixes[str] then
- return suffixes[str][1]
- else
- return ""
- end
-end
-
-function resolvers.suffixes_of_format(str)
- if suffixes[str] then
- return suffixes[str]
- else
- return {}
- end
-end
-
-function resolvers.register_in_trees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
- end
-end
-
--- split the next one up for readability (but this module needs a cleanup anyway)
-
-local function can_be_dir(name) -- can become local
- local fakepaths = instance.fakepaths
- if not fakepaths[name] then
- if lfs.isdir(name) then
- fakepaths[name] = 1 -- directory
- else
- fakepaths[name] = 2 -- no directory
- end
- end
- return (fakepaths[name] == 1)
-end
-
-local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc)
- local result = collected or { }
- local stamp = nil
- filename = file.collapse_path(filename)
- -- speed up / beware: format problem
- if instance.remember then
- stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format
- if instance.found[stamp] then
- if trace_locating then
- logs.report("fileio","remembering file '%s'",filename)
- end
- return instance.found[stamp]
- end
- end
- if not dangerous[instance.format or "?"] then
- if resolvers.isreadable.file(filename) then
- if trace_detail then
- logs.report("fileio","file '%s' found directly",filename)
- end
- instance.found[stamp] = { filename }
- return { filename }
- end
- end
- if find(filename,'%*') then
- if trace_locating then
- logs.report("fileio","checking wildcard '%s'", filename)
- end
- result = resolvers.find_wildcard_files(filename)
- elseif file.is_qualified_path(filename) then
- if resolvers.isreadable.file(filename) then
- if trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, file.extname(filename)
- if suffix == "" then -- why
- if instance.format == "" then
- forcedname = filename .. ".tex"
- if resolvers.isreadable.file(forcedname) then
- if trace_locating then
- logs.report("fileio","no suffix, forcing standard filetype 'tex'")
- end
- result, ok = { forcedname }, true
- end
- else
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
- forcedname = filename .. "." .. s
- if resolvers.isreadable.file(forcedname) then
- if trace_locating then
- logs.report("fileio","no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = file.basename(filename)
- local pattern = gsub(filename .. "$","([%.%-])","%%%1")
- local savedformat = instance.format
- local format = savedformat or ""
- if format == "" then
- instance.format = resolvers.format_of_suffix(suffix)
- end
- if not format then
- instance.format = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename)
- if #result == 0 then
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered)
- end
- end
- instance.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
- end
- end
- else
- -- search spec
- local filetype, extra, done, wantedfiles, ext = '', nil, false, { }, file.extname(filename)
- if ext == "" then
- if not instance.force_suffixes then
- wantedfiles[#wantedfiles+1] = filename
- end
- else
- wantedfiles[#wantedfiles+1] = filename
- end
- if instance.format == "" then
- if ext == "" then
- local forcedname = filename .. '.tex'
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.format_of_suffix(forcedname)
- if trace_locating then
- logs.report("fileio","forcing filetype '%s'",filetype)
- end
- else
- filetype = resolvers.format_of_suffix(filename)
- if trace_locating then
- logs.report("fileio","using suffix based filetype '%s'",filetype)
- end
- end
- else
- if ext == "" then
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. s
- end
- end
- filetype = instance.format
- if trace_locating then
- logs.report("fileio","using given filetype '%s'",filetype)
- end
- end
- local typespec = resolvers.variable_of_format(filetype)
- local pathlist = resolvers.expanded_path_list(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
- end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and resolvers.isreadable.file(fname) then
- filename, done = fname, true
- result[#result+1] = file.join('.',fname)
- break
- end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3]
- result[#result+1] = filename
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = file.dirname(filelist[i][2]) .. "/"
- end
- end
- if trace_detail then
- logs.report("fileio","checking filename '%s'",filename)
- end
- -- a bit messy ... esp the doscan setting here
- local doscan
- for k=1,#pathlist do
- local path = pathlist[k]
- if find(path,"^!!") then doscan = false else doscan = true end
- local pathname = gsub(path,"^!+", '')
- done = false
- -- using file list
- if filelist then
- local expression
- -- compare list entries with permitted pattern -- /xx /xx//
- if not find(pathname,"/$") then
- expression = pathname .. "/"
- else
- expression = pathname
- end
- expression = gsub(expression,"([%-%.])","%%%1") -- this also influences
- expression = gsub(expression,"//+$", '/.*') -- later usage of pathname
- expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
- expression = "^" .. expression .. "$"
- if trace_detail then
- logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- --- todo, test for readable
- result[#result+1] = fl[3]
- resolvers.register_in_trees(f) -- for tracing used files
- done = true
- if instance.allresults then
- if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d)
- end
- else
- if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d)
- end
- break
- end
- elseif trace_detail then
- logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d)
- end
- end
- end
- if not done and doscan then
- -- check if on disk / unchecked / does not work at all / also zips
- if resolvers.splitmethod(pathname).scheme == 'file' then -- ?
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- for k=1,#wantedfiles do
- local w = wantedfiles[k]
- local fname = file.join(ppname,w)
- if resolvers.isreadable.file(fname) then
- if trace_detail then
- logs.report("fileio","found '%s' by scanning",fname)
- end
- result[#result+1] = fname
- done = true
- if not instance.allresults then break end
- end
- end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
- end
- end
- end
- end
- if not done and doscan then
- -- todo: slow path scanning
- end
- if done and not instance.allresults then break end
- end
- end
- end
- for k=1,#result do
- result[k] = file.collapse_path(result[k])
- end
- if instance.remember then
- instance.found[stamp] = result
- end
- return result
-end
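
The gsub chain above turns a cleaned-up path specification into a Lua pattern before the hash entries are matched against it. A minimal sketch of that conversion, using a hypothetical path spec (the leading '!!' is assumed to be stripped already):

    local find, gsub = string.find, string.gsub
    local pathname   = "texmf/tex/context//"            -- hypothetical cleaned path spec
    local expression = find(pathname,"/$") and pathname or pathname .. "/"
    expression = gsub(expression,"([%-%.])","%%%1")     -- escape '-' and '.'
    expression = gsub(expression,"//+$",'/.*')          -- trailing '//' matches anything below
    expression = gsub(expression,"//",'/.-/')           -- inner '//' matches intermediate dirs
    expression = "^" .. expression .. "$"
    print(expression)                                   -- ^texmf/tex/context/.*$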
-
-if not resolvers.concatinators then resolvers.concatinators = { } end
-
-resolvers.concatinators.tex = file.join
-resolvers.concatinators.file = resolvers.concatinators.tex
-
-function resolvers.find_files(filename,filetype,mustexist)
- if type(mustexist) == 'boolean' then
- -- all set
- elseif type(filetype) == 'boolean' then
- filetype, mustexist = nil, false
- elseif type(filetype) ~= 'string' then
- filetype, mustexist = nil, false
- end
- instance.format = filetype or ''
- local result = collect_instance_files(filename)
- if #result == 0 then
- local lowered = lower(filename)
- if filename ~= lowered then
- return collect_instance_files(lowered)
- end
- end
- instance.format = ''
- return result
-end
-
-function resolvers.find_file(filename,filetype,mustexist)
- return (resolvers.find_files(filename,filetype,mustexist)[1] or "")
-end
-
-function resolvers.find_given_files(filename)
- local bname, result = file.basename(filename), { }
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- local files = instance.files[hash.tag] or { }
- local blist = files[bname]
- if not blist then
- local rname = "remap:"..bname
- blist = files[rname]
- if blist then
- bname = files[rname]
- blist = files[bname]
- end
- end
- if blist then
- if type(blist) == 'string' then
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,blist,bname) or ""
- if not instance.allresults then break end
- else
- for kk=1,#blist do
- local vv = blist[kk]
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,vv,bname) or ""
- if not instance.allresults then break end
- end
- end
- end
- end
- return result
-end
-
-function resolvers.find_given_file(filename)
- return (resolvers.find_given_files(filename)[1] or "")
-end
-
-local function doit(path,blist,bname,tag,kind,result,allresults)
- local done = false
- if blist and kind then
- if type(blist) == 'string' then
- -- make function and share code
- if find(lower(blist),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,blist,bname) or ""
- done = true
- end
- else
- for kk=1,#blist do
- local vv = blist[kk]
- if find(lower(vv),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,vv,bname) or ""
- done = true
- if not allresults then break end
- end
- end
- end
- end
- return done
-end
-
-function resolvers.find_wildcard_files(filename) -- todo: remap:
- local result = { }
- local bname, dname = file.basename(filename), file.dirname(filename)
- local path = gsub(dname,"^*/","")
- path = gsub(path,"*",".*")
- path = gsub(path,"-","%%-")
- if dname == "" then
- path = ".*"
- end
- local name = bname
- name = gsub(name,"*",".*")
- name = gsub(name,"-","%%-")
- path = lower(path)
- name = lower(name)
- local files, allresults, done = instance.files, instance.allresults, false
- if find(name,"%*") then
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- for kk, hh in next, files[hash.tag] do
- if not find(kk,"^remap:") then
- if find(lower(kk),name) then
- if doit(path,hh,kk,tag,kind,result,allresults) then done = true end
- if done and not allresults then break end
- end
- end
- end
- end
- else
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- if doit(path,files[tag][bname],bname,tag,kind,result,allresults) then done = true end
- if done and not allresults then break end
- end
- end
- -- we can consider also searching the paths not in the database, but then
- -- we end up with a messy search (all // in all path specs)
- return result
-end
-
-function resolvers.find_wildcard_file(filename)
- return (resolvers.find_wildcard_files(filename)[1] or "")
-end
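
The wildcard lookup above first rewrites the shell-style glob into lowercased Lua patterns for the directory part and the base name. A minimal sketch with a hypothetical query:

    local gsub, lower = string.gsub, string.lower
    -- as if resolvers.find_wildcard_files("fonts/map/*/*-lm.map") had been called
    local bname, dname = "*-lm.map", "fonts/map/*"
    local path = gsub(dname,"^*/","")                   -- only a leading '*/' is dropped
    path = gsub(path,"*",".*")
    path = gsub(path,"-","%%-")
    local name = gsub(bname,"*",".*")
    name = gsub(name,"-","%%-")
    print(lower(path),lower(name))                      -- fonts/map/.*    .*%-lm.map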
-
--- main user functions
-
-function resolvers.automount()
- -- implemented later
-end
-
-function resolvers.load(option)
- statistics.starttiming(instance)
- resolvers.resetconfig()
- resolvers.identify_cnf()
- resolvers.load_lua() -- will become the new method
- resolvers.expand_variables()
- resolvers.load_cnf() -- will be skipped when we have a lua file
- resolvers.expand_variables()
- if option ~= "nofiles" then
- resolvers.load_hash()
- resolvers.automount()
- end
- statistics.stoptiming(instance)
-end
-
-function resolvers.for_files(command, files, filetype, mustexist)
- if files and #files > 0 then
- local function report(str)
- if trace_locating then
- logs.report("fileio",str) -- has already verbose
- else
- print(str)
- end
- end
- if trace_locating then
- report('') -- ?
- end
- for f=1,#files do
- local file = files[f]
- local result = command(file,filetype,mustexist)
- if type(result) == 'string' then
- report(result)
- else
- for i=1,#result do
- report(result[i]) -- could be unpack
- end
- end
- end
- end
-end
-
--- strtab
-
-resolvers.var_value = resolvers.variable -- output the value of variable $STRING.
-resolvers.expand_var = resolvers.expansion -- output variable expansion of STRING.
-
-function resolvers.show_path(str) -- output search path for file type NAME
- return file.join_path(resolvers.expanded_path_list(resolvers.format_of_var(str)))
-end
-
--- resolvers.find_file(filename)
--- resolvers.find_file(filename, filetype, mustexist)
--- resolvers.find_file(filename, mustexist)
--- resolvers.find_file(filename, filetype)
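
A hedged usage sketch of those call forms; the file names are made up and the results depend on the local tree:

    local one = resolvers.find_file("context.mkiv")          -- filetype derived from the suffix
    local two = resolvers.find_file("cont-en.tex","tex")     -- explicit filetype
    local all = resolvers.find_files("cont-*.tex","tex")     -- wildcard query, returns a table of matches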
-
-function resolvers.register_file(files, name, path)
- if files[name] then
- if type(files[name]) == 'string' then
- files[name] = { files[name], path }
- else
- files[name] = path
- end
- else
- files[name] = path
- end
-end
-
-function resolvers.splitmethod(filename)
- if not filename then
- return { } -- safeguard
- elseif type(filename) == "table" then
- return filename -- already split
- elseif not find(filename,"://") then
- return { scheme="file", path = filename, original=filename } -- quick hack
- else
- return url.hashed(filename)
- end
-end
-
-function table.sequenced(t,sep) -- temp here
- local s = { }
- for k, v in next, t do -- indexed?
- s[#s+1] = k .. "=" .. tostring(v)
- end
- return concat(s, sep or " | ")
-end
-
-function resolvers.methodhandler(what, filename, filetype) -- ...
- filename = file.collapse_path(filename)
- local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
- local scheme = specification.scheme
- if resolvers[what][scheme] then
- if trace_locating then
- logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
- end
- return resolvers[what][scheme](filename,filetype) -- todo: specification
- else
- return resolvers[what].tex(filename,filetype) -- todo: specification
- end
-end
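
Together splitmethod and methodhandler implement a small scheme dispatch: names without a 'scheme://' prefix are wrapped as plain files, everything else goes through url.hashed and is routed to the handler registered for its scheme. A hedged sketch (the zip specification is hypothetical and needs the zip handlers to be loaded):

    local s1 = resolvers.splitmethod("tex/context/base/data-res.lua")
    -- s1 = { scheme = "file", path = "tex/context/base/data-res.lua", original = ... }
    local s2 = resolvers.splitmethod("zip:///archive.zip?name=foo.tex")
    -- s2.scheme == "zip", so methodhandler("finders",...) would dispatch to resolvers.finders.zip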
-
-function resolvers.clean_path(str)
- if str then
- str = gsub(str,"\\","/")
- str = gsub(str,"^!+","")
- str = gsub(str,"^~",resolvers.homedir)
- return str
- else
- return nil
- end
-end
-
-function resolvers.do_with_path(name,func)
- local pathlist = resolvers.expanded_path_list(name)
- for i=1,#pathlist do
- func("^"..resolvers.clean_path(pathlist[i]))
- end
-end
-
-function resolvers.do_with_var(name,func)
- func(expanded_var(name))
-end
-
-function resolvers.with_files(pattern,handle)
- local hashes = instance.hashes
- for i=1,#hashes do
- local hash = hashes[i]
- local blobpath = hash.tag
- local blobtype = hash.type
- if blobpath then
- local files = instance.files[blobpath]
- if files then
- for k,v in next, files do
- if find(k,"^remap:") then
- k = files[k]
- v = files[k] -- chained
- end
- if find(k,pattern) then
- if type(v) == "string" then
- handle(blobtype,blobpath,v,k)
- else
- for _,vv in next, v do -- indexed
- handle(blobtype,blobpath,vv,k)
- end
- end
- end
- end
- end
- end
- end
-end
-
-function resolvers.locate_format(name)
- local barename, fmtname = gsub(name,"%.%a+$",""), ""
- if resolvers.usecache then
- local path = file.join(caches.setpath("formats")) -- maybe platform
- fmtname = file.join(path,barename..".fmt") or ""
- end
- if fmtname == "" then
- fmtname = resolvers.find_files(barename..".fmt")[1] or ""
- end
- fmtname = resolvers.clean_path(fmtname)
- if fmtname ~= "" then
- local barename = file.removesuffix(fmtname)
- local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
- if lfs.isfile(luiname) then
- return barename, luiname
- elseif lfs.isfile(lucname) then
- return barename, lucname
- elseif lfs.isfile(luaname) then
- return barename, luaname
- end
- end
- return nil, nil
-end
-
-function resolvers.boolean_variable(str,default)
- local b = resolvers.expansion(str)
- if b == "" then
- return default
- else
- b = toboolean(b)
- return (b == nil and default) or b
- end
-end
-
-texconfig.kpse_init = false
-
-kpse = { original = kpse } setmetatable(kpse, { __index = function(k,v) return resolvers[v] end } )
-
--- for a while
-
-input = resolvers
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-tmp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>This module deals with caching data. It sets up the paths and
-implements loaders and savers for tables. Best is to set the
-following variable. When not set, the usual paths will be
-checked. Personally I prefer the (users) temporary path.</p>
-
-<code>
-TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
-</code>
-
-<p>Currently we do no locking when we write files. This is no real
-problem because most caching involves fonts and the chance of them
-being written at the same time is small. We also need to extend
-luatools with a recache feature.</p>
---ldx]]--
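
A minimal usage sketch, assuming TEXMFCACHE points at a writable location and using the functions defined below (the category names are just examples):

    -- in the environment: TEXMFCACHE=/home/user/.cache/texmf
    local path = caches.setpath("fonts","otf")            -- .../luatex-cache/context/<treehash>/fonts/otf
    caches.savedata(path,"somefont",{ metadata = { } })   -- writes somefont.tma and compiles somefont.tmc
    local data = caches.loaddata(path,"somefont")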
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet
-
-caches = caches or { }
-
-caches.path = caches.path or nil
-caches.base = caches.base or "luatex-cache"
-caches.more = caches.more or "context"
-caches.direct = false -- true is faster but may need huge amounts of memory
-caches.tree = false
-caches.paths = caches.paths or nil
-caches.force = false
-caches.defaults = { "TEXMFCACHE", "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
-
-function caches.temp()
- local cachepath = nil
- local function check(list,isenv)
- if not cachepath then
- for k=1,#list do
- local v = list[k]
- cachepath = (isenv and (os.env[v] or "")) or v or ""
- if cachepath == "" then
- -- next
- else
- cachepath = resolvers.clean_path(cachepath)
- if lfs.isdir(cachepath) and file.iswritable(cachepath) then -- lfs.attributes(cachepath,"mode") == "directory"
- break
- elseif caches.force or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
- dir.mkdirs(cachepath)
- if lfs.isdir(cachepath) and file.iswritable(cachepath) then
- break
- end
- end
- end
- cachepath = nil
- end
- end
- end
- check(resolvers.clean_path_list("TEXMFCACHE") or { })
- check(caches.defaults,true)
- if not cachepath then
- print("\nfatal error: there is no valid (writable) cache path defined\n")
- os.exit()
- elseif not lfs.isdir(cachepath) then -- lfs.attributes(cachepath,"mode") ~= "directory"
- print(format("\nfatal error: cache path %s is not a directory\n",cachepath))
- os.exit()
- end
- cachepath = file.collapse_path(cachepath)
- function caches.temp()
- return cachepath
- end
- return cachepath
-end
-
-function caches.configpath()
- return table.concat(resolvers.instance.cnffiles,";")
-end
-
-function caches.hashed(tree)
- return md5.hex(gsub(lower(tree),"[\\\/]+","/"))
-end
-
-function caches.treehash()
- local tree = caches.configpath()
- if not tree or tree == "" then
- return false
- else
- return caches.hashed(tree)
- end
-end
-
-function caches.setpath(...)
- if not caches.path then
- if not caches.path then
- caches.path = caches.temp()
- end
- caches.path = resolvers.clean_path(caches.path) -- to be sure
- caches.tree = caches.tree or caches.treehash()
- if caches.tree then
- caches.path = dir.mkdirs(caches.path,caches.base,caches.more,caches.tree)
- else
- caches.path = dir.mkdirs(caches.path,caches.base,caches.more)
- end
- end
- if not caches.path then
- caches.path = '.'
- end
- caches.path = resolvers.clean_path(caches.path)
- local dirs = { ... }
- if #dirs > 0 then
- local pth = dir.mkdirs(caches.path,...)
- return pth
- end
- caches.path = dir.expand_name(caches.path)
- return caches.path
-end
-
-function caches.definepath(category,subcategory)
- return function()
- return caches.setpath(category,subcategory)
- end
-end
-
-function caches.setluanames(path,name)
- return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
-end
-
-function caches.loaddata(path,name)
- local tmaname, tmcname = caches.setluanames(path,name)
- local loader = loadfile(tmcname) or loadfile(tmaname)
- if loader then
- loader = loader()
- collectgarbage("step")
- return loader
- else
- return false
- end
-end
-
---~ function caches.loaddata(path,name)
---~ local tmaname, tmcname = caches.setluanames(path,name)
---~ return dofile(tmcname) or dofile(tmaname)
---~ end
-
-function caches.iswritable(filepath,filename)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- return file.iswritable(tmaname)
-end
-
-function caches.savedata(filepath,filename,data,raw)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- local reduce, simplify = true, true
- if raw then
- reduce, simplify = false, false
- end
- data.cache_uuid = os.uuid()
- if caches.direct then
- file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex
- else
- table.tofile(tmaname, data,'return',false,true,false) -- maybe not the last true
- end
- local cleanup = resolvers.boolean_variable("PURGECACHE", false)
- local strip = resolvers.boolean_variable("LUACSTRIP", true)
- utils.lua.compile(tmaname, tmcname, cleanup, strip)
-end
-
--- here we use the cache for format loading (texconfig.[formatname|jobname])
-
---~ if tex and texconfig and texconfig.formatname and texconfig.formatname == "" then
-if tex and texconfig and (not texconfig.formatname or texconfig.formatname == "") and input and resolvers.instance then
- if not texconfig.luaname then texconfig.luaname = "cont-en.lua" end -- or luc
- texconfig.formatname = caches.setpath("formats") .. "/" .. gsub(texconfig.luaname,"%.lu.$",".fmt")
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-inp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-resolvers.finders = resolvers.finders or { }
-resolvers.openers = resolvers.openers or { }
-resolvers.loaders = resolvers.loaders or { }
-
-resolvers.finders.notfound = { nil }
-resolvers.openers.notfound = { nil }
-resolvers.loaders.notfound = { false, nil, 0 }
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-out'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-outputs = outputs or { }
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-con'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
-local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
-
---[[ldx--
-<p>Once we found ourselves defining similar cache constructs
-several times, containers were introduced. Containers are used
-to collect tables in memory and reuse them when possible based
-on (unique) hashes (to be provided by the calling function).</p>
-
-<p>Caching to disk is disabled by default. Version numbers are
-stored in the saved table which makes it possible to change the
-table structures without bothering about the disk cache.</p>
-
-<p>Examples of usage can be found in the font related code.</p>
---ldx]]--
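
A typical round trip, roughly as the font code does it (the category, hash and version used here are made up):

    local cache = containers.define("fonts","demo",1.001,true)() -- define returns a constructor
    local hash  = containers.cleanname("Some Font Name")
    local data  = containers.read(cache,hash)
    if not data then
        data = { glyphs = { } }                   -- the expensive computation goes here
        data = containers.write(cache,hash,data)  -- tags it with cache_version and saves it
    end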
-
-containers = containers or { }
-
-containers.usecache = true
-
-local function report(container,tag,name)
- if trace_cache or trace_containers then
- logs.report(format("%s cache",container.subcategory),"%s: %s",tag,name or 'invalid')
- end
-end
-
-local allocated = { }
-
--- tracing
-
-function containers.define(category, subcategory, version, enabled)
- return function()
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or 1.000,
- trace = false,
- path = caches and caches.setpath and caches.setpath(category,subcategory),
- }
- c[subcategory] = s
- end
- return s
- else
- return nil
- end
- end
-end
-
-function containers.is_usable(container, name)
- return container.enabled and caches and caches.iswritable(container.path, name)
-end
-
-function containers.is_valid(container, name)
- if name and name ~= "" then
- local storage = container.storage[name]
- return storage and storage.cache_version == container.version
- else
- return false
- end
-end
-
-function containers.read(container,name)
- if container.enabled and caches and not container.storage[name] and containers.usecache then
- container.storage[name] = caches.loaddata(container.path,name)
- if containers.is_valid(container,name) then
- report(container,"loaded",name)
- else
- container.storage[name] = nil
- end
- end
- if container.storage[name] then
- report(container,"reusing",name)
- end
- return container.storage[name]
-end
-
-function containers.write(container, name, data)
- if data then
- data.cache_version = container.version
- if container.enabled and caches then
- local unique, shared = data.unique, data.shared
- data.unique, data.shared = nil, nil
- caches.savedata(container.path, name, data)
- report(container,"saved",name)
- data.unique, data.shared = unique, shared
- end
- report(container,"stored",name)
- container.storage[name] = data
- end
- return data
-end
-
-function containers.content(container,name)
- return container.storage[name]
-end
-
-function containers.cleanname(name)
- return (gsub(lower(name),"[^%w%d]+","-"))
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-use'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
--- since we want to use the cache instead of the tree, we will now
--- reimplement the saver.
-
-local save_data = resolvers.save_data
-local load_data = resolvers.load_data
-
-resolvers.cachepath = nil -- public, for tracing
-resolvers.usecache = true -- public, for tracing
-
-function resolvers.save_data(dataname)
- save_data(dataname, function(cachename,dataname)
- resolvers.usecache = not toboolean(resolvers.expansion("CACHEINTDS") or "false",true)
- if resolvers.usecache then
- resolvers.cachepath = resolvers.cachepath or caches.definepath("trees")
- return file.join(resolvers.cachepath(),caches.hashed(cachename))
- else
- return file.join(cachename,dataname)
- end
- end)
-end
-
-function resolvers.load_data(pathname,dataname,filename)
- load_data(pathname,dataname,filename,function(dataname,filename)
- resolvers.usecache = not toboolean(resolvers.expansion("CACHEINTDS") or "false",true)
- if resolvers.usecache then
- resolvers.cachepath = resolvers.cachepath or caches.definepath("trees")
- return file.join(resolvers.cachepath(),caches.hashed(pathname))
- else
- if not filename or (filename == "") then
- filename = dataname
- end
- return file.join(pathname,filename)
- end
- end)
-end
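
So, unless CACHEINTDS is set, the saved data of a tree ends up under the cache with an md5 derived name instead of inside the tree itself; roughly (hypothetical tree):

    local cachename = "/usr/local/texmf"          -- the tree being saved
    local target    = file.join(caches.definepath("trees")(),caches.hashed(cachename))
    -- something like <cacheroot>/trees/<md5 of the lowercased, normalized path>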
-
--- we will make a better format, maybe something xml or just text or lua
-
-resolvers.automounted = resolvers.automounted or { }
-
-function resolvers.automount(usecache)
- local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT'))
- if (not mountpaths or #mountpaths == 0) and usecache then
- mountpaths = { caches.setpath("mount") }
- end
- if mountpaths and #mountpaths > 0 then
- statistics.starttiming(resolvers.instance)
- for k=1,#mountpaths do
- local root = mountpaths[k]
- local f = io.open(root.."/url.tmi")
- if f then
- for line in f:lines() do
- if line then
- if find(line,"^[%%#%-]") then -- or %W
- -- skip
- elseif find(line,"^zip://") then
- if trace_locating then
- logs.report("fileio","mounting %s",line)
- end
- table.insert(resolvers.automounted,line)
- resolvers.usezipfile(line)
- end
- end
- end
- f:close()
- end
- end
- statistics.stoptiming(resolvers.instance)
- end
-end
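
The url.tmi files read here are plain text with one specification per line; lines starting with %, # or - are skipped and only zip:// lines are mounted. A hypothetical mount file could be prepared like this:

    -- hypothetical: one comment line plus one zip archive to mount
    io.savedata(caches.setpath("mount") .. "/url.tmi",
        "% archives mounted at startup\nzip:///archives/extra-fonts.zip\n")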
-
--- status info
-
-statistics.register("used config path", function() return caches.configpath() end)
-statistics.register("used cache path", function() return caches.temp() or "?" end)
-
--- experiment (code will move)
-
-function statistics.save_fmt_status(texname,formatbanner,sourcefile) -- texname == formatname
- local enginebanner = status.list().banner
- if formatbanner and enginebanner and sourcefile then
- local luvname = file.replacesuffix(texname,"luv")
- local luvdata = {
- enginebanner = enginebanner,
- formatbanner = formatbanner,
- sourcehash = md5.hex(io.loaddata(resolvers.find_file(sourcefile)) or "unknown"),
- sourcefile = sourcefile,
- }
- io.savedata(luvname,table.serialize(luvdata,true))
- end
-end
-
-function statistics.check_fmt_status(texname)
- local enginebanner = status.list().banner
- if enginebanner and texname then
- local luvname = file.replacesuffix(texname,"luv")
- if lfs.isfile(luvname) then
- local luv = dofile(luvname)
- if luv and luv.sourcefile then
- local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
- local luvbanner = luv.enginebanner or "?"
- if luvbanner ~= enginebanner then
- return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
- end
- local luvhash = luv.sourcehash or "?"
- if luvhash ~= sourcehash then
- return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
- end
- else
- return "invalid status file"
- end
- else
- return "missing status file"
- end
- end
- return true
-end
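
The luv companion saved above is nothing more than a serialized Lua table; for a hypothetical cont-en format it would contain something along these lines (all values illustrative):

    -- cont-en.luv (illustrative)
    return {
     enginebanner = "This is LuaTeX, Version beta-0.60.x",
     formatbanner = "ConTeXt  ver: 2010.01.01 00:00 MKIV",
     sourcehash   = "f1e2d3c4b5a6978877665544332211ff",
     sourcefile   = "cont-en.mkiv",
    }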
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['luat-kps'] = {
- version = 1.001,
- comment = "companion to luatools.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>This file is used when we want the input handlers to behave like
-<type>kpsewhich</type>. What to do with the following:</p>
-
-<typing>
-{$SELFAUTOLOC,$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}
-$SELFAUTOLOC : /usr/tex/bin/platform
-$SELFAUTODIR : /usr/tex/bin
-$SELFAUTOPARENT : /usr/tex
-</typing>
-
-<p>How about just forgetting about them?</p>
---ldx]]--
-
-local suffixes = resolvers.suffixes
-local formats = resolvers.formats
-
-suffixes['gf'] = { '<resolution>gf' }
-suffixes['pk'] = { '<resolution>pk' }
-suffixes['base'] = { 'base' }
-suffixes['bib'] = { 'bib' }
-suffixes['bst'] = { 'bst' }
-suffixes['cnf'] = { 'cnf' }
-suffixes['mem'] = { 'mem' }
-suffixes['mf'] = { 'mf' }
-suffixes['mfpool'] = { 'pool' }
-suffixes['mft'] = { 'mft' }
-suffixes['mppool'] = { 'pool' }
-suffixes['graphic/figure'] = { 'eps', 'epsi' }
-suffixes['texpool'] = { 'pool' }
-suffixes['PostScript header'] = { 'pro' }
-suffixes['ist'] = { 'ist' }
-suffixes['web'] = { 'web', 'ch' }
-suffixes['cweb'] = { 'w', 'web', 'ch' }
-suffixes['cmap files'] = { 'cmap' }
-suffixes['lig files'] = { 'lig' }
-suffixes['bitmap font'] = { }
-suffixes['MetaPost support'] = { }
-suffixes['TeX system documentation'] = { }
-suffixes['TeX system sources'] = { }
-suffixes['dvips config'] = { }
-suffixes['type42 fonts'] = { }
-suffixes['web2c files'] = { }
-suffixes['other text files'] = { }
-suffixes['other binary files'] = { }
-suffixes['opentype fonts'] = { 'otf' }
-
-suffixes['fmt'] = { 'fmt' }
-suffixes['texmfscripts'] = { 'rb','lua','py','pl' }
-
-suffixes['pdftex config'] = { }
-suffixes['Troff fonts'] = { }
-
-suffixes['ls-R'] = { }
-
---[[ldx--
-<p>If you wondered about some of the previous mappings, how about
-the next bunch:</p>
---ldx]]--
-
-formats['bib'] = ''
-formats['bst'] = ''
-formats['mft'] = ''
-formats['ist'] = ''
-formats['web'] = ''
-formats['cweb'] = ''
-formats['MetaPost support'] = ''
-formats['TeX system documentation'] = ''
-formats['TeX system sources'] = ''
-formats['Troff fonts'] = ''
-formats['dvips config'] = ''
-formats['graphic/figure'] = ''
-formats['ls-R'] = ''
-formats['other text files'] = ''
-formats['other binary files'] = ''
-
-formats['gf'] = ''
-formats['pk'] = ''
-formats['base'] = 'MFBASES'
-formats['cnf'] = ''
-formats['mem'] = 'MPMEMS'
-formats['mf'] = 'MFINPUTS'
-formats['mfpool'] = 'MFPOOL'
-formats['mppool'] = 'MPPOOL'
-formats['texpool'] = 'TEXPOOL'
-formats['PostScript header'] = 'TEXPSHEADERS'
-formats['cmap files'] = 'CMAPFONTS'
-formats['type42 fonts'] = 'T42FONTS'
-formats['web2c files'] = 'WEB2C'
-formats['pdftex config'] = 'PDFTEXCONFIG'
-formats['texmfscripts'] = 'TEXMFSCRIPTS'
-formats['bitmap font'] = ''
-formats['lig files'] = 'LIGFONTS'
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-aux'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local find = string.find
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix
- local scriptpath = "scripts/context/lua"
- newname = file.addsuffix(newname,"lua")
- local oldscript = resolvers.clean_path(oldname)
- if trace_locating then
- logs.report("fileio","to be replaced old script %s", oldscript)
- end
- local newscripts = resolvers.find_files(newname) or { }
- if #newscripts == 0 then
- if trace_locating then
- logs.report("fileio","unable to locate new script")
- end
- else
- for i=1,#newscripts do
- local newscript = resolvers.clean_path(newscripts[i])
- if trace_locating then
- logs.report("fileio","checking new script %s", newscript)
- end
- if oldscript == newscript then
- if trace_locating then
- logs.report("fileio","old and new script are the same")
- end
- elseif not find(newscript,scriptpath) then
- if trace_locating then
- logs.report("fileio","new script should come from %s",scriptpath)
- end
- elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
- if trace_locating then
- logs.report("fileio","invalid new script name")
- end
- else
- local newdata = io.loaddata(newscript)
- if newdata then
- if trace_locating then
- logs.report("fileio","old script content replaced by new content")
- end
- io.savedata(oldscript,newdata)
- break
- elseif trace_locating then
- logs.report("fileio","unable to load new script")
- end
- end
- end
- end
-end
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-lst'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- used in mtxrun
-
-local find, concat, upper, format = string.find, table.concat, string.upper, string.format
-
-resolvers.listers = resolvers.listers or { }
-
-local function tabstr(str)
- if type(str) == 'table' then
- return concat(str," | ")
- else
- return str
- end
-end
-
-local function list(list,report)
- local instance = resolvers.instance
- local pat = upper(instance.pattern or "")
- local report = report or texio.write_nl
- local sorted = table.sortedkeys(list)
- for i=1,#sorted do
- local key = sorted[i]
- if instance.pattern == "" or find(upper(key),pat) then
- if instance.kpseonly then
- if instance.kpsevars[key] then
- report(format("%s=%s",key,tabstr(list[key])))
- end
- else
- report(format('%s %s=%s',(instance.kpsevars[key] and 'K') or 'E',key,tabstr(list[key])))
- end
- end
- end
-end
-
-function resolvers.listers.variables () list(resolvers.instance.variables ) end
-function resolvers.listers.expansions() list(resolvers.instance.expansions) end
-
-function resolvers.listers.configurations(report)
- local report = report or texio.write_nl
- local instance = resolvers.instance
- local sorted = table.sortedkeys(instance.kpsevars)
- for i=1,#sorted do
- local key = sorted[i]
- if not instance.pattern or (instance.pattern=="") or find(key,instance.pattern) then
- report(format("%s\n",key))
- local order = instance.order
- for i=1,#order do
- local str = order[i][key]
- if str then
- report(format("\t%s\t%s",i,str))
- end
- end
- report("")
- end
- end
-end
-
-
-end -- of closure
--- end library merge
-
--- We initialize some characteristics of this program. We need to
--- do this before we load the libraries, else own.name will not be
--- properly set (handy for selfcleaning the file). It's an ugly
--- looking piece of code.
-
-own = { }
-
-own.libs = { -- todo: check which ones are really needed
- 'l-string.lua',
- 'l-lpeg.lua',
- 'l-table.lua',
- 'l-io.lua',
- 'l-number.lua',
- 'l-set.lua',
- 'l-os.lua',
- 'l-file.lua',
- 'l-md5.lua',
- 'l-url.lua',
- 'l-dir.lua',
- 'l-boolean.lua',
- 'l-unicode.lua',
- 'l-math.lua',
- 'l-utils.lua',
- 'l-aux.lua',
- 'trac-tra.lua',
- 'luat-env.lua',
- 'trac-inf.lua',
- 'trac-log.lua',
- 'data-res.lua',
- 'data-tmp.lua',
--- 'data-pre.lua',
- 'data-inp.lua',
- 'data-out.lua',
- 'data-con.lua',
- 'data-use.lua',
--- 'data-tex.lua',
--- 'data-bin.lua',
--- 'data-zip.lua',
--- 'data-crl.lua',
--- 'data-lua.lua',
- 'data-kps.lua', -- so that we can replace kpsewhich
- 'data-aux.lua', -- updater
- 'data-lst.lua', -- lister
-}
-
--- We need this hack till luatex is fixed.
-
-if arg and arg[0] == 'luatex' and arg[1] == "--luaonly" then
- arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
-end
-
--- End of hack.
-
-own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua'
-own.path = string.match(own.name,"^(.+)[\\/].-$") or "."
-own.list = { '.' }
-
-if own.path ~= '.' then
- table.insert(own.list,own.path)
-end
-
-table.insert(own.list,own.path.."/../../../tex/context/base")
-table.insert(own.list,own.path.."/mtx")
-table.insert(own.list,own.path.."/../sources")
-
-function locate_libs()
- for _, lib in pairs(own.libs) do
- for _, pth in pairs(own.list) do
- local filename = string.gsub(pth .. "/" .. lib,"\\","/")
- local codeblob = loadfile(filename)
- if codeblob then
- codeblob()
- own.list = { pth } -- speed up the search
- break
- end
- end
- end
-end
-
-if not resolvers then
- locate_libs()
-end
-
-if not resolvers then
- print("")
- print("Luatools is unable to start up due to lack of libraries. You may")
- print("try to run 'lua luatools.lua --selfmerge' in the path where this")
- print("script is located (normally under ..../scripts/context/lua) which")
- print("will make luatools library independent.")
- os.exit()
-end
-
-logs.setprogram('LuaTools',"TDS Management Tool 1.32",environment.arguments["verbose"] or false)
-
-local instance = resolvers.reset()
-
-resolvers.defaultlibs = { -- not all are needed (this will become: context.lus (lua spec)
- 'l-string.lua',
- 'l-lpeg.lua',
- 'l-table.lua',
- 'l-boolean.lua',
- 'l-number.lua',
- 'l-unicode.lua',
- 'l-os.lua',
- 'l-io.lua',
- 'l-file.lua',
- 'l-md5.lua',
- 'l-url.lua',
- 'l-dir.lua',
- 'l-utils.lua',
- 'l-dimen.lua',
- 'trac-inf.lua',
- 'trac-tra.lua',
- 'trac-log.lua',
- 'luat-env.lua', -- here ?
- 'data-res.lua',
- 'data-inp.lua',
- 'data-out.lua',
- 'data-tmp.lua',
- 'data-con.lua',
- 'data-use.lua',
--- 'data-pre.lua',
- 'data-tex.lua',
- 'data-bin.lua',
--- 'data-zip.lua',
--- 'data-crl.lua',
- 'data-lua.lua',
- 'data-ctx.lua',
- 'luat-fio.lua',
- 'luat-cnf.lua',
-}
-
-instance.engine = environment.arguments["engine"] or 'luatex'
-instance.progname = environment.arguments["progname"] or 'context'
-instance.luaname = environment.arguments["luafile"] or "" -- environment.ownname or ""
-instance.lualibs = environment.arguments["lualibs"] or table.concat(resolvers.defaultlibs,",")
-instance.allresults = environment.arguments["all"] or false
-instance.pattern = environment.arguments["pattern"] or nil
-instance.sortdata = environment.arguments["sort"] or false
-instance.kpseonly = not environment.arguments["all"] or false
-instance.my_format = environment.arguments["format"] or instance.format
-
-if type(instance.pattern) == 'boolean' then
- logs.simple("invalid pattern specification")
- instance.pattern = nil
-end
-
-if environment.arguments["trace"] then resolvers.settrace(environment.arguments["trace"]) end
-
-local trackspec = environment.argument("trackers") or environment.argument("track")
-
-if trackspec then
- trackers.enable(trackspec)
-end
-
-runners = runners or { }
-messages = messages or { }
-
-messages.no_ini_file = [[
-There is no lua initialization file found. This file can be forced by the
-"--progname" directive, or specified with "--luaname", or it is derived
-automatically from the formatname (aka jobname). It may be that you have
-to regenerate the file database using "luatools --generate".
-]]
-
-messages.help = [[
---generate generate file database
---variables show configuration variables
---expansions show expanded variables
---configurations show configuration order
---expand-braces expand complex variable
---expand-path expand variable (resolve paths)
---expand-var expand variable (resolve references)
---show-path show path expansion of ...
---var-value report value of variable
---find-file report file location
---find-path report path of file
---make or --ini make luatex format
---run or --fmt= run luatex format
---luafile=str lua inifile (default is <progname>.lua)
---lualibs=list libraries to assemble (optional when --compile)
---compile assemble and compile lua inifile
---verbose give a bit more info
---all show all found files
---sort sort cached data
---engine=str target engine
---progname=str format or backend
---pattern=str filter variables
---trackers=list enable given trackers
-]]
-
-function runners.make_format(texname)
- local instance = resolvers.instance
- if texname and texname ~= "" then
- if resolvers.usecache then
- local path = file.join(caches.setpath("formats")) -- maybe platform
- if path and lfs then
- lfs.chdir(path)
- end
- end
- local barename = texname:gsub("%.%a+$","")
- if barename == texname then
- texname = texname .. ".tex"
- end
- local fullname = resolvers.find_files(texname)[1] or ""
- if fullname == "" then
- logs.simple("no tex file with name: %s",texname)
- else
- local luaname, lucname, luapath, lualibs = "", "", "", { }
- -- the following is optional, since context.lua can also
- -- handle this collect and compile business
- if environment.arguments["compile"] then
- if luaname == "" then luaname = barename end
- logs.simple("creating initialization file: %s",luaname)
- luapath = file.dirname(luaname)
- if luapath == "" then
- luapath = file.dirname(texname)
- end
- if luapath == "" then
- luapath = file.dirname(resolvers.find_files(texname)[1] or "")
- end
- lualibs = string.split(instance.lualibs,",")
- luaname = file.basename(barename .. ".lua")
- lucname = file.basename(barename .. ".luc")
- -- todo: when this fails, we can just copy the merged libraries from
- -- luatools since they are normally the same, at least for context
- if lualibs[1] then
- local firstlib = file.join(luapath,lualibs[1])
- if not lfs.isfile(firstlib) then
- local foundname = resolvers.find_files(lualibs[1])[1]
- if foundname then
- logs.simple("located library path: %s",luapath)
- luapath = file.dirname(foundname)
- end
- end
- end
- logs.simple("using library path: %s",luapath)
- logs.simple("using lua libraries: %s",table.join(lualibs," "))
- utils.merger.selfcreate(lualibs,luapath,luaname)
- local strip = resolvers.boolean_variable("LUACSTRIP", true)
- if utils.lua.compile(luaname,lucname,false,strip) and io.exists(lucname) then
- luaname = lucname
- logs.simple("using compiled initialization file: %s",lucname)
- else
- logs.simple("using uncompiled initialization file: %s",luaname)
- end
- else
- local what = { instance.luaname, instance.progname, barename }
- for k=1,#what do
- local v = string.gsub(what[k]..".lua","%.lua%.lua$",".lua")
- if v and (v ~= "") then
- luaname = resolvers.find_files(v)[1] or ""
- if luaname ~= "" then
- break
- end
- end
- end
- end
- if environment.arguments["noluc"] then
- luaname = luaname:gsub("%.luc$",".lua") -- make this an option
- end
- if luaname == "" then
- if logs.verbose then
- logs.simplelines(messages.no_ini_file)
- logs.simple("texname : %s",texname)
- logs.simple("luaname : %s",instance.luaname)
- logs.simple("progname: %s",instance.progname)
- logs.simple("barename: %s",barename)
- end
- else
- logs.simple("using lua initialization file: %s",luaname)
- local mp = dir.glob(file.removesuffix(file.basename(luaname)).."-*.mem")
- if mp and #mp > 0 then
- for i=1,#mp do
- local name = mp[i]
- logs.simple("removing related mplib format %s", file.basename(name))
- os.remove(name)
- end
- end
- local flags = {
- "--ini",
- "--lua=" .. string.quote(luaname)
- }
- local bs = (os.platform == "unix" and "\\\\") or "\\" -- todo: make a function
- local command = "luatex ".. table.concat(flags," ") .. " " .. string.quote(fullname) .. " " .. bs .. "dump"
- logs.simple("running command: %s\n",command)
- os.spawn(command)
- -- todo: do a dummy run that generates the related metafun and mfplain formats
- end
- end
- else
- logs.simple("no tex file given")
- end
-end
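
With --compile the net effect is that plain luatex is spawned in ini mode with the assembled initialization file, essentially: luatex --ini --lua="cont-en.luc" "cont-en.mkiv" \\dump (hypothetical names), where the doubled backslash is the unix shell quoting produced by the bs variable above.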
-
-function runners.run_format(name,data,more)
- -- hm, rather old code here; we can now use the file.whatever functions
- if name and (name ~= "") then
- local barename = name:gsub("%.%a+$","")
- local fmtname = ""
- if resolvers.usecache then
- local path = file.join(caches.setpath("formats")) -- maybe platform
- fmtname = file.join(path,barename..".fmt") or ""
- end
- if fmtname == "" then
- fmtname = resolvers.find_files(barename..".fmt")[1] or ""
- end
- fmtname = resolvers.clean_path(fmtname)
- barename = fmtname:gsub("%.%a+$","")
- if fmtname == "" then
- logs.simple("no format with name: %s",name)
- else
- local luaname = barename .. ".luc"
- local f = io.open(luaname)
- if not f then
- luaname = barename .. ".lua"
- f = io.open(luaname)
- end
- if f then
- f:close()
- local command = "luatex --fmt=" .. string.quote(barename) .. " --lua=" .. string.quote(luaname) .. " " .. string.quote(data) .. " " .. (more ~= "" and string.quote(more) or "")
- logs.simple("running command: %s",command)
- os.spawn(command)
- else
- logs.simple("using format name: %s",fmtname)
- logs.simple("no luc/lua with name: %s",barename)
- end
- end
- end
-end
-
-local ok = true
-
--- private option --noluc for testing errors in the stub
-
-if environment.arguments["find-file"] then
- resolvers.load()
- instance.format = environment.arguments["format"] or instance.format
- if instance.pattern then
- instance.allresults = true
- resolvers.for_files(resolvers.find_files, { instance.pattern }, instance.my_format)
- else
- resolvers.for_files(resolvers.find_files, environment.files, instance.my_format)
- end
-elseif environment.arguments["find-path"] then
- resolvers.load()
- local path = resolvers.find_file(environment.files[1], instance.my_format)
- if logs.verbose then
- logs.simple(file.dirname(path))
- else
- print(file.dirname(path))
- end
-elseif environment.arguments["run"] then
- resolvers.load("nofiles") -- ! no need for loading databases
- logs.setverbose(true)
- runners.run_format(environment.files[1] or "",environment.files[2] or "",environment.files[3] or "")
-elseif environment.arguments["fmt"] then
- resolvers.load("nofiles") -- ! no need for loading databases
- logs.setverbose(true)
- runners.run_format(environment.arguments["fmt"], environment.files[1] or "",environment.files[2] or "")
-elseif environment.arguments["expand-braces"] then
- resolvers.load("nofiles")
- resolvers.for_files(resolvers.expand_braces, environment.files)
-elseif environment.arguments["expand-path"] then
- resolvers.load("nofiles")
- resolvers.for_files(resolvers.expand_path, environment.files)
-elseif environment.arguments["expand-var"] or environment.arguments["expand-variable"] then
- resolvers.load("nofiles")
- resolvers.for_files(resolvers.expand_var, environment.files)
-elseif environment.arguments["show-path"] or environment.arguments["path-value"] then
- resolvers.load("nofiles")
- resolvers.for_files(resolvers.show_path, environment.files)
-elseif environment.arguments["var-value"] or environment.arguments["show-value"] then
- resolvers.load("nofiles")
- resolvers.for_files(resolvers.var_value, environment.files)
-elseif environment.arguments["format-path"] then
- resolvers.load()
- logs.simple(caches.setpath("format"))
-elseif instance.pattern then -- brrr
- resolvers.load()
- instance.format = environment.arguments["format"] or instance.format
- instance.allresults = true
- resolvers.for_files(resolvers.find_files, { instance.pattern }, instance.my_format)
-elseif environment.arguments["generate"] then
- instance.renewcache = true
- logs.setverbose(true)
- resolvers.load()
-elseif environment.arguments["make"] or environment.arguments["ini"] or environment.arguments["compile"] then
- resolvers.load()
- logs.setverbose(true)
- runners.make_format(environment.files[1] or "")
-elseif environment.arguments["selfmerge"] then
- utils.merger.selfmerge(own.name,own.libs,own.list)
-elseif environment.arguments["selfclean"] then
- utils.merger.selfclean(own.name)
-elseif environment.arguments["selfupdate"] then
- resolvers.load()
- logs.setverbose(true)
- resolvers.update_script(own.name,"luatools")
-elseif environment.arguments["variables"] or environment.arguments["show-variables"] then
- resolvers.load("nofiles")
- resolvers.listers.variables()
-elseif environment.arguments["expansions"] or environment.arguments["show-expansions"] then
- resolvers.load("nofiles")
- resolvers.listers.expansions()
-elseif environment.arguments["configurations"] or environment.arguments["show-configurations"] then
- resolvers.load("nofiles")
- resolvers.listers.configurations()
-elseif environment.arguments["help"] or (environment.files[1]=='help') or (#environment.files==0) then
- logs.help(messages.help)
-else
- resolvers.load()
- resolvers.for_files(resolvers.find_files, environment.files, instance.my_format)
-end
-
-if logs.verbose then
- logs.simpleline()
- logs.simple("runtime: %0.3f seconds",os.runtime())
-end
-
-if os.platform == "unix" then
- io.write("\n")
-end
+#!/bin/sh
+mtxrun --script base "$@"
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index b99327692..46db66493 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -38,8 +38,6 @@ if not modules then modules = { } end modules ['mtxrun'] = {
-- remember for subruns: _CTX_K_S_#{original}_
-- remember for subruns: TEXMFSTART.#{original} [tex.rb texmfstart.rb]
-texlua = true
-
-- begin library merge
@@ -97,13 +95,6 @@ function string:unquote()
return (gsub(self,"^([\"\'])(.*)%1$","%2"))
end
---~ function string:unquote()
---~ if find(self,"^[\'\"]") then
---~ return sub(self,2,-2)
---~ else
---~ return self
---~ end
---~ end
function string:quote() -- we could use format("%q")
return format("%q",self)
@@ -126,11 +117,6 @@ function string:limit(n,sentinel)
end
end
---~ function string:strip() -- the .- is quite efficient
---~ -- return match(self,"^%s*(.-)%s*$") or ""
---~ -- return match(self,'^%s*(.*%S)') or '' -- posted on lua list
---~ return find(s,'^%s*$') and '' or match(s,'^%s*(.*%S)')
---~ end
do -- roberto's variant:
local space = lpeg.S(" \t\v\n")
@@ -217,13 +203,6 @@ function is_number(str) -- tonumber
return find(str,"^[%-%+]?[%d]-%.?[%d+]$") == 1
end
---~ print(is_number("1"))
---~ print(is_number("1.1"))
---~ print(is_number(".1"))
---~ print(is_number("-0.1"))
---~ print(is_number("+0.1"))
---~ print(is_number("-.1"))
---~ print(is_number("+.1"))
function string:split_settings() -- no {} handling, see l-aux for lpeg variant
if find(self,"=") then
@@ -278,18 +257,6 @@ function string:totable()
return lpegmatch(pattern,self)
end
---~ local t = {
---~ "1234567123456712345671234567",
---~ "a\tb\tc",
---~ "aa\tbb\tcc",
---~ "aaa\tbbb\tccc",
---~ "aaaa\tbbbb\tcccc",
---~ "aaaaa\tbbbbb\tccccc",
---~ "aaaaaa\tbbbbbb\tcccccc",
---~ }
---~ for k,v do
---~ print(string.tabtospace(t[k]))
---~ end
function string.tabtospace(str,tab)
-- we don't handle embedded newlines
@@ -390,6 +357,11 @@ patterns.whitespace = patterns.eol + patterns.spacer
patterns.nonwhitespace = 1 - patterns.whitespace
patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191')
+patterns.validutf8 = patterns.utf8^0 * P(-1) * Cc(true) + Cc(false)
+
+patterns.undouble = P('"')/"" * (1-P('"'))^0 * P('"')/""
+patterns.unsingle = P("'")/"" * (1-P("'"))^0 * P("'")/""
+patterns.unspacer = ((patterns.spacer^1)/"")^0
function lpeg.anywhere(pattern) --slightly adapted from website
return P { P(pattern) + 1 * V(1) } -- why so complex?
@@ -412,10 +384,6 @@ end
patterns.textline = content
---~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more
---~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more
---~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps
---~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps
local splitters_s, splitters_m = { }, { }
@@ -484,19 +452,7 @@ function string:checkedsplit(separator)
return match(c,self)
end
---~ function lpeg.append(list,pp)
---~ local p = pp
---~ for l=1,#list do
---~ if p then
---~ p = p + P(list[l])
---~ else
---~ p = P(list[l])
---~ end
---~ end
---~ return p
---~ end
---~ from roberto's site:
local f1 = string.byte
@@ -506,6 +462,53 @@ local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 6
patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+local cache = { }
+
+function lpeg.stripper(str)
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+end
+
+function lpeg.replacer(t)
+ if #t > 0 then
+ local p
+ for i=1,#t do
+ local ti= t[i]
+ local pp = P(ti[1]) / ti[2]
+ p = (p and p + pp ) or pp
+ end
+ return Cs((p + 1)^0)
+ end
+end
+
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(P(1)^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+
end -- of closure
@@ -783,9 +786,6 @@ function table.one_entry(t) -- obsolete, use inline code instead
return n and not next(t,n)
end
---~ function table.starts_at(t) -- obsolete, not nice anyway
---~ return ipairs(t,1)(t,0)
---~ end
function table.tohash(t,value)
local h = { }
@@ -806,12 +806,6 @@ function table.fromhash(t)
return h
end
---~ print(table.serialize(t), "\n")
---~ print(table.serialize(t,"name"), "\n")
---~ print(table.serialize(t,false), "\n")
---~ print(table.serialize(t,true), "\n")
---~ print(table.serialize(t,"name",true), "\n")
---~ print(table.serialize(t,"name",true,true), "\n")
table.serialize_functions = true
table.serialize_compact = true
@@ -871,8 +865,7 @@ local function do_serialize(root,name,depth,level,indexed)
if indexed then
handle(format("%s{",depth))
elseif name then
- --~ handle(format("%s%s={",depth,key(name)))
- if type(name) == "number" then -- or find(k,"^%d+$") then
+ if type(name) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s[0x%04X]={",depth,name))
else
@@ -901,10 +894,8 @@ local function do_serialize(root,name,depth,level,indexed)
for i=1,#sk do
local k = sk[i]
local v = root[k]
- --~ if v == root then
- -- circular
- --~ else
- local t = type(v)
+ -- circular
+ local t = type(v)
if compact and first and type(k) == "number" and k >= first and k <= last then
if t == "number" then
if hexify then
@@ -947,12 +938,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s __p__=nil,",depth))
end
elseif t == "number" then
- --~ if hexify then
- --~ handle(format("%s %s=0x%04X,",depth,key(k),v))
- --~ else
- --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g
- --~ end
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
else
@@ -973,8 +959,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "string" then
if reduce and tonumber(v) then
- --~ handle(format("%s %s=%s,",depth,key(k),v))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%s,",depth,k,v))
else
@@ -986,8 +971,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%q]=%s,",depth,k,v))
end
else
- --~ handle(format("%s %s=%q,",depth,key(k),v))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%q,",depth,k,v))
else
@@ -1001,8 +985,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "table" then
if not next(v) then
- --~ handle(format("%s %s={},",depth,key(k)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]={},",depth,k))
else
@@ -1016,8 +999,7 @@ local function do_serialize(root,name,depth,level,indexed)
elseif inline then
local st = simple_table(v)
if st then
- --~ handle(format("%s %s={ %s },",depth,key(k),concat(st,", ")))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
else
@@ -1035,8 +1017,7 @@ local function do_serialize(root,name,depth,level,indexed)
do_serialize(v,k,depth,level+1)
end
elseif t == "boolean" then
- --~ handle(format("%s %s=%s,",depth,key(k),tostring(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
else
@@ -1049,8 +1030,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
- --~ handle(format('%s %s=loadstring(%q),',depth,key(k),dump(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
else
@@ -1063,8 +1043,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
end
else
- --~ handle(format("%s %s=%q,",depth,key(k),tostring(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if type(k) == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
else
@@ -1076,8 +1055,7 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%q]=%q,",depth,k,tostring(v)))
end
end
- --~ end
- end
+ end
end
if level > 0 then
handle(format("%s},",depth))
@@ -1118,19 +1096,11 @@ local function serialize(root,name,_handle,_reduce,_noquotes,_hexify)
handle("t={")
end
if root and next(root) then
- do_serialize(root,name,"",0,indexed)
+ do_serialize(root,name,"",0)
end
handle("}")
end
---~ name:
---~
---~ true : return { }
---~ false : { }
---~ nil : t = { }
---~ string : string = { }
---~ 'return' : return { }
---~ number : [number] = { }
function table.serialize(root,name,reduce,noquotes,hexify)
local t = { }
@@ -1353,9 +1323,6 @@ function table.swapped(t)
return s
end
---~ function table.are_equal(a,b)
---~ return table.serialize(a) == table.serialize(b)
---~ end
function table.clone(t,p) -- t is optional or nil or table
if not p then
@@ -1421,6 +1388,17 @@ function table.insert_after_value(t,value,extra)
insert(t,#t+1,extra)
end
+function table.sequenced(t,sep)
+ local s = { }
+ for k, v in next, t do -- indexed?
+ s[#s+1] = k .. "=" .. tostring(v)
+ end
+ return concat(s, sep or " | ")
+end
+
+function table.print(...)
+ print(table.serialize(...))
+end
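-- A small usage sketch (not part of the commit) for the two helpers added
-- above; the table contents are invented and the pair order is undefined:
--~ print(table.sequenced({ alpha = 1, beta = true }))     -- e.g. "alpha=1 | beta=true"
--~ print(table.sequenced({ alpha = 1, beta = true },",")) -- e.g. "alpha=1,beta=true"
--~ table.print({ alpha = 1, beta = true })                -- prints the serialized table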
end -- of closure
@@ -1756,17 +1734,6 @@ function set.contains(n,s)
end
end
---~ local c = set.create{'aap','noot','mies'}
---~ local s = set.tonumber(c)
---~ local t = set.totable(s)
---~ print(t['aap'])
---~ local c = set.create{'zus','wim','jet'}
---~ local s = set.tonumber(c)
---~ local t = set.totable(s)
---~ print(t['aap'])
---~ print(t['jet'])
---~ print(set.contains(t,'jet'))
---~ print(set.contains(t,'aap'))
@@ -1784,29 +1751,97 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
-local find, format, gsub = string.find, string.format, string.gsub
+local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
local random, ceil = math.random, math.ceil
+local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
+
+-- The following code permits traversing the environment table, at least
+-- in luatex. Internally all environment names are uppercase.
+
+if not os.__getenv__ then
+
+ os.__getenv__ = os.getenv
+ os.__setenv__ = os.setenv
+
+ if os.env then
-local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush
+ local osgetenv = os.getenv
+ local ossetenv = os.setenv
+ local osenv = os.env local _ = osenv.PATH -- initialize the table
+
+ function os.setenv(k,v)
+ if v == nil then
+ v = ""
+ end
+ local K = upper(k)
+ osenv[K] = v
+ ossetenv(K,v)
+ end
+
+ function os.getenv(k)
+ local K = upper(k)
+ local v = osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
+ if v == "" then
+ return nil
+ else
+ return v
+ end
+ end
+
+ else
+
+ local ossetenv = os.setenv
+ local osgetenv = os.getenv
+ local osenv = { }
+
+ function os.setenv(k,v)
+ if v == nil then
+ v = ""
+ end
+ local K = upper(k)
+ osenv[K] = v
+ end
+
+ function os.getenv(k)
+ local K = upper(k)
+ local v = osenv[K] or osgetenv(K) or osgetenv(k)
+ if v == "" then
+ return nil
+ else
+ return v
+ end
+ end
+
+ local function __index(t,k)
+ return os.getenv(k)
+ end
+ local function __newindex(t,k,v)
+ os.setenv(k,v)
+ end
+
+ os.env = { }
+
+ setmetatable(os.env, { __index = __index, __newindex = __newindex } )
+
+ end
+
+end
+
+-- end of environment hack
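-- A minimal sketch (not part of the commit) of the case-insensitive access
-- that the block above provides; the variable name is invented:
--~ os.setenv("MyTestVar","okay")
--~ print(os.getenv("mytestvar")) -- "okay", names are normalized to uppercase
--~ print(os.env.MYTESTVAR)       -- "okay" as well, via the os.env table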
+
+local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush
function os.execute(...) ioflush() return execute(...) end
function os.spawn (...) ioflush() return spawn (...) end
function os.exec (...) ioflush() return exec (...) end
+function io.popen (...) ioflush() return iopopen(...) end
function os.resultof(command)
- ioflush() -- else messed up logging
local handle = io.popen(command,"r")
- if not handle then
- -- print("unknown command '".. command .. "' in os.resultof")
- return ""
- else
- return handle:read("*all") or ""
- end
+ return handle and handle:read("*all") or ""
end
---~ os.type : windows | unix (new, we already guessed os.platform)
---~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
---~ os.platform : extended os.name with architecture
if not io.fileseparator then
if find(os.getenv("PATH"),";") then
@@ -1856,11 +1891,6 @@ function os.runtime()
return os.gettimeofday() - startuptime
end
---~ print(os.gettimeofday()-os.time())
---~ os.sleep(1.234)
---~ print (">>",os.runtime())
---~ print(os.date("%H:%M:%S",os.gettimeofday()))
---~ print(os.date("%H:%M:%S",os.time()))
-- no need for function anymore as we have more clever code and helpers now
-- this metatable trickery might as well disappear
@@ -1878,24 +1908,6 @@ end
setmetatable(os,osmt)
-if not os.setenv then
-
- -- we still store them but they won't be seen in
- -- child processes although we might pass them some day
- -- using command concatination
-
- local env, getenv = { }, os.getenv
-
- function os.setenv(k,v)
- env[k] = v
- end
-
- function os.getenv(k)
- return env[k] or getenv(k)
- end
-
-end
-
-- we can use HOSTTYPE on some platforms
local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or ""
@@ -2016,7 +2028,7 @@ elseif name == "kfreebsd" then
-- we sometimes have HOSTTYPE set so let's check that first
local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
if find(architecture,"x86_64") then
- platform = "kfreebsd-64"
+ platform = "kfreebsd-amd64"
else
platform = "kfreebsd-i386"
end
@@ -2093,59 +2105,81 @@ if not modules then modules = { } end modules ['l-file'] = {
file = file or { }
-local concat = table.concat
+local insert, concat = table.insert, table.concat
local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char
local lpegmatch = lpeg.match
+local getcurrentdir = lfs.currentdir
-function file.removesuffix(filename)
- return (gsub(filename,"%.[%a%d]+$",""))
+local function dirname(name,default)
+ return match(name,"^(.+)[/\\].-$") or (default or "")
end
-function file.addsuffix(filename, suffix)
- if not suffix or suffix == "" then
- return filename
- elseif not find(filename,"%.[%a%d]+$") then
- return filename .. "." .. suffix
- else
- return filename
- end
+local function basename(name)
+ return match(name,"^.+[/\\](.-)$") or name
end
-function file.replacesuffix(filename, suffix)
- return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+local function nameonly(name)
+ return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
end
-function file.dirname(name,default)
- return match(name,"^(.+)[/\\].-$") or (default or "")
+local function extname(name,default)
+ return match(name,"^.+%.([^/\\]-)$") or default or ""
end
-function file.basename(name)
- return match(name,"^.+[/\\](.-)$") or name
+local function splitname(name)
+ local n, s = match(name,"^(.+)%.([^/\\]-)$")
+ return n or name, s or ""
end
-function file.nameonly(name)
- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
+file.basename = basename
+file.dirname = dirname
+file.nameonly = nameonly
+file.extname = extname
+file.suffix = extname
+
+function file.removesuffix(filename)
+ return (gsub(filename,"%.[%a%d]+$",""))
end
-function file.extname(name,default)
- return match(name,"^.+%.([^/\\]-)$") or default or ""
+function file.addsuffix(filename, suffix, criterium)
+ if not suffix or suffix == "" then
+ return filename
+ elseif criterium == true then
+ return filename .. "." .. suffix
+ elseif not criterium then
+ local n, s = splitname(filename)
+ if not s or s == "" then
+ return filename .. "." .. suffix
+ else
+ return filename
+ end
+ else
+ local n, s = splitname(filename)
+ if s and s ~= "" then
+ local t = type(criterium)
+ if t == "table" then
+ -- keep if in criterium
+ for i=1,#criterium do
+ if s == criterium[i] then
+ return filename
+ end
+ end
+ elseif t == "string" then
+ -- keep if criterium
+ if s == criterium then
+ return filename
+ end
+ end
+ end
+ return n .. "." .. suffix
+ end
end
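-- A sketch (not from the commit) of the new criterium argument; the file
-- names are invented:
--~ print(file.addsuffix("name","tex"))                   -- "name.tex"
--~ print(file.addsuffix("name.xml","tex"))               -- "name.xml" (suffix already present)
--~ print(file.addsuffix("name.xml","tex",true))          -- "name.xml.tex" (forced)
--~ print(file.addsuffix("name.xml","tex","xml"))         -- "name.xml" (matches the criterium)
--~ print(file.addsuffix("name.txt","tex",{"xml","tex"})) -- "name.tex" ("txt" is not in the list)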
-file.suffix = file.extname
---~ function file.join(...)
---~ local pth = concat({...},"/")
---~ pth = gsub(pth,"\\","/")
---~ local a, b = match(pth,"^(.*://)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ a, b = match(pth,"^(//)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ return (gsub(pth,"//+","/"))
---~ end
+function file.replacesuffix(filename, suffix)
+ return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+end
+
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
@@ -2173,18 +2207,9 @@ function file.join(...)
return (gsub(pth,"//+","/"))
end
---~ print(file.join("//","/y"))
---~ print(file.join("/","/y"))
---~ print(file.join("","/y"))
---~ print(file.join("/x/","/y"))
---~ print(file.join("x/","/y"))
---~ print(file.join("http://","/y"))
---~ print(file.join("http://a","/y"))
---~ print(file.join("http:///a","/y"))
---~ print(file.join("//nas-1","/y"))
function file.iswritable(name)
- local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,"."))
+ local a = lfs.attributes(name) or lfs.attributes(dirname(name,"."))
return a and sub(a.permissions,2,2) == "w"
end
@@ -2198,17 +2223,6 @@ file.is_writable = file.iswritable
-- todo: lpeg
---~ function file.split_path(str)
---~ local t = { }
---~ str = gsub(str,"\\", "/")
---~ str = gsub(str,"(%a):([;/])", "%1\001%2")
---~ for name in gmatch(str,"([^;:]+)") do
---~ if name ~= "" then
---~ t[#t+1] = gsub(name,"\001",":")
---~ end
---~ end
---~ return t
---~ end
local checkedsplit = string.checkedsplit
@@ -2223,31 +2237,62 @@ end
-- we can hash them weakly
-function file.collapse_path(str)
+
+function file.collapse_path(str,anchor)
+ if anchor and not find(str,"^/") and not find(str,"^%a:") then
+ str = getcurrentdir() .. "/" .. str
+ end
+ if str == "" or str =="." then
+ return "."
+ elseif find(str,"^%.%.") then
+ str = gsub(str,"\\","/")
+ return str
+ elseif not find(str,"%.") then
+ str = gsub(str,"\\","/")
+ return str
+ end
str = gsub(str,"\\","/")
- if find(str,"/") then
- str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified
- str = gsub(str,"/%./","/")
- local n, m = 1, 1
- while n > 0 or m > 0 do
- str, n = gsub(str,"[^/%.]+/%.%.$","")
- str, m = gsub(str,"[^/%.]+/%.%./","")
+ local starter, rest = match(str,"^(%a+:/*)(.-)$")
+ if starter then
+ str = rest
+ end
+ local oldelements = checkedsplit(str,"/")
+ local newelements = { }
+ local i = #oldelements
+ while i > 0 do
+ local element = oldelements[i]
+ if element == '.' then
+ -- do nothing
+ elseif element == '..' then
+ local n = i -1
+ while n > 0 do
+ local element = oldelements[n]
+ if element ~= '..' and element ~= '.' then
+ oldelements[n] = '.'
+ break
+ else
+ n = n - 1
+ end
+ end
+ if n < 1 then
+ insert(newelements,1,'..')
+ end
+ elseif element ~= "" then
+ insert(newelements,1,element)
end
- str = gsub(str,"([^/])/$","%1")
- -- str = gsub(str,"^%./","") -- ./xx in qualified
- str = gsub(str,"/%.$","")
+ i = i - 1
+ end
+ if #newelements == 0 then
+ return starter or "."
+ elseif starter then
+ return starter .. concat(newelements, '/')
+ elseif find(str,"^/") then
+ return "/" .. concat(newelements,'/')
+ else
+ return concat(newelements, '/')
end
- if str == "" then str = "." end
- return str
end
---~ print(file.collapse_path("/a"))
---~ print(file.collapse_path("a/./b/.."))
---~ print(file.collapse_path("a/aa/../b/bb"))
---~ print(file.collapse_path("a/../.."))
---~ print(file.collapse_path("a/.././././b/.."))
---~ print(file.collapse_path("a/./././b/.."))
---~ print(file.collapse_path("a/b/c/../.."))
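-- Some sample calls (not part of the commit) for the rewritten collapser; the
-- paths are invented and the second argument anchors relative paths to
-- lfs.currentdir():
--~ print(file.collapse_path("a/./b/.."))     -- "a"
--~ print(file.collapse_path("a/aa/../b/bb")) -- "a/b/bb"
--~ print(file.collapse_path("a/b/c/../..")) -- "a"
--~ print(file.collapse_path("x/y.tex",true)) -- anchored to the current directory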
function file.robustname(str)
return (gsub(str,"[^%a%d%/%-%.\\]+","-"))
@@ -2262,92 +2307,23 @@ end
-- lpeg variants, slightly faster, not always
---~ local period = lpeg.P(".")
---~ local slashes = lpeg.S("\\/")
---~ local noperiod = 1-period
---~ local noslashes = 1-slashes
---~ local name = noperiod^1
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1
-
---~ function file.extname(name)
---~ return lpegmatch(pattern,name) or ""
---~ end
-
---~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1)
-
---~ function file.removesuffix(name)
---~ return lpegmatch(pattern,name)
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1
-
---~ function file.basename(name)
---~ return lpegmatch(pattern,name) or name
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1
-
---~ function file.dirname(name)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2)
---~ else
---~ return ""
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
-
---~ function file.addsuffix(name, suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return name
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
-
---~ function file.replacesuffix(name,suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2) .. "." .. suffix
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1
-
---~ function file.nameonly(name)
---~ local a, b = lpegmatch(pattern,name)
---~ if b then
---~ return sub(name,a,b-2)
---~ elseif a then
---~ return sub(name,a)
---~ else
---~ return name
---~ end
---~ end
-
---~ local test = file.extname
---~ local test = file.basename
---~ local test = file.dirname
---~ local test = file.addsuffix
---~ local test = file.replacesuffix
---~ local test = file.nameonly
-
---~ print(1,test("./a/b/c/abd.def.xxx","!!!"))
---~ print(2,test("./../b/c/abd.def.xxx","!!!"))
---~ print(3,test("a/b/c/abd.def.xxx","!!!"))
---~ print(4,test("a/b/c/def.xxx","!!!"))
---~ print(5,test("a/b/c/def","!!!"))
---~ print(6,test("def","!!!"))
---~ print(7,test("def.xxx","!!!"))
-
---~ local tim = os.clock() for i=1,250000 do local ext = test("abd.def.xxx","!!!") end print(os.clock()-tim)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-- also rewrite previous
@@ -2387,14 +2363,6 @@ end
-- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
-- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
---~ -- todo:
---~
---~ if os.type == "windows" then
---~ local currentdir = lfs.currentdir
---~ function lfs.currentdir()
---~ return (gsub(currentdir(),"\\","/"))
---~ end
---~ end
end -- of closure
@@ -2420,18 +2388,6 @@ if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
---~ if not md5.HEX then
---~ local function remap(chr) return format("%02X",byte(chr)) end
---~ function md5.HEX(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
---~ if not md5.hex then
---~ local function remap(chr) return format("%02x",byte(chr)) end
---~ function md5.hex(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
---~ if not md5.dec then
---~ local function remap(chr) return format("%03i",byte(chr)) end
---~ function md5.dec(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
file.needs_updating_threshold = 1
@@ -2487,9 +2443,10 @@ if not modules then modules = { } end modules ['l-url'] = {
license = "see context related readme files"
}
-local char, gmatch, gsub = string.char, string.gmatch, string.gsub
+local char, gmatch, gsub, format, byte = string.char, string.gmatch, string.gsub, string.format, string.byte
+local concat = table.concat
local tonumber, type = tonumber, type
-local lpegmatch = lpeg.match
+local lpegmatch, lpegP, lpegC, lpegR, lpegS, lpegCs, lpegCc = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc
-- from the spec (on the web):
--
@@ -2507,22 +2464,35 @@ local function tochar(s)
return char(tonumber(s,16))
end
-local colon, qmark, hash, slash, percent, endofstring = lpeg.P(":"), lpeg.P("?"), lpeg.P("#"), lpeg.P("/"), lpeg.P("%"), lpeg.P(-1)
+local colon, qmark, hash, slash, percent, endofstring = lpegP(":"), lpegP("?"), lpegP("#"), lpegP("/"), lpegP("%"), lpegP(-1)
-local hexdigit = lpeg.R("09","AF","af")
-local plus = lpeg.P("+")
-local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar)
+local hexdigit = lpegR("09","AF","af")
+local plus = lpegP("+")
+local nothing = lpegCc("")
+local escaped = (plus / " ") + (percent * lpegC(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("")
-local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("")
-local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("")
-local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("")
-local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0) + lpeg.Cc("")
+local scheme = lpegCs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
+local authority = slash * slash * lpegCs((escaped+(1- slash-qmark-hash))^0) + nothing
+local path = slash * lpegCs((escaped+(1- qmark-hash))^0) + nothing
+local query = qmark * lpegCs((escaped+(1- hash))^0) + nothing
+local fragment = hash * lpegCs((escaped+(1- endofstring))^0) + nothing
local parser = lpeg.Ct(scheme * authority * path * query * fragment)
+lpeg.patterns.urlsplitter = parser
+
+local escapes = { }
+
+for i=0,255 do
+ escapes[i] = format("%%%02X",i)
+end
+
+local escaper = lpeg.Cs((lpegR("09","AZ","az") + lpegS("-./_") + lpegP(1) / escapes)^0)
+
+lpeg.patterns.urlescaper = escaper
+
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
@@ -2535,15 +2505,27 @@ end
function url.hashed(str)
local s = url.split(str)
local somescheme = s[1] ~= ""
- return {
- scheme = (somescheme and s[1]) or "file",
- authority = s[2],
- path = s[3],
- query = s[4],
- fragment = s[5],
- original = str,
- noscheme = not somescheme,
- }
+ if not somescheme then
+ return {
+ scheme = "file",
+ authority = "",
+ path = str,
+ query = "",
+ fragment = "",
+ original = str,
+ noscheme = true,
+ }
+ else
+ return {
+ scheme = s[1],
+ authority = s[2],
+ path = s[3],
+ query = s[4],
+ fragment = s[5],
+ original = str,
+ noscheme = false,
+ }
+ end
end
function url.hasscheme(str)
@@ -2554,15 +2536,25 @@ function url.addscheme(str,scheme)
return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str)
end
-function url.construct(hash)
- local fullurl = hash.sheme .. "://".. hash.authority .. hash.path
- if hash.query then
- fullurl = fullurl .. "?".. hash.query
+function url.construct(hash) -- todo: we need to escape !
+ local fullurl = { }
+ local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
+ if scheme and scheme ~= "" then
+ fullurl[#fullurl+1] = scheme .. "://"
+ end
+ if authority and authority ~= "" then
+ fullurl[#fullurl+1] = authority
end
- if hash.fragment then
- fullurl = fullurl .. "?".. hash.fragment
+ if path and path ~= "" then
+ fullurl[#fullurl+1] = "/" .. path
end
- return fullurl
+ if query and query ~= "" then
+ fullurl[#fullurl+1] = "?".. query
+ end
+ if fragment and fragment ~= "" then
+ fullurl[#fullurl+1] = "#".. fragment
+ end
+ return lpegmatch(escaper,concat(fullurl))
end
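-- A quick sketch (not from the commit) of splitting and rebuilding an url;
-- the addresses are only examples:
--~ local h = url.hashed("http://www.pragma-ade.com/spaced%20name")
--~ print(h.scheme, h.authority, h.path) -- "http"  "www.pragma-ade.com"  "spaced name"
--~ print(url.construct(h))              -- glued back together and run through the escaper
--~ local f = url.hashed("/etc/passwords")
--~ print(f.scheme, f.path)              -- "file"  "/etc/passwords" (no scheme given)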
function url.filename(filename)
@@ -2582,37 +2574,12 @@ function url.query(str)
end
end
---~ print(url.filename("file:///c:/oeps.txt"))
---~ print(url.filename("c:/oeps.txt"))
---~ print(url.filename("file:///oeps.txt"))
---~ print(url.filename("file:///etc/test.txt"))
---~ print(url.filename("/oeps.txt"))
-
---~ from the spec on the web (sort of):
---~
---~ function test(str)
---~ print(table.serialize(url.hashed(str)))
---~ end
---~
---~ test("%56pass%20words")
---~ test("file:///c:/oeps.txt")
---~ test("file:///c|/oeps.txt")
---~ test("file:///etc/oeps.txt")
---~ test("file://./etc/oeps.txt")
---~ test("file:////etc/oeps.txt")
---~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt")
---~ test("http://www.ietf.org/rfc/rfc2396.txt")
---~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what")
---~ test("mailto:John.Doe@example.com")
---~ test("news:comp.infosystems.www.servers.unix")
---~ test("tel:+1-816-555-1212")
---~ test("telnet://192.0.2.16:80/")
---~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2")
---~ test("/etc/passwords")
---~ test("http://www.pragma-ade.com/spaced%20name")
-
---~ test("zip:///oeps/oeps.zip#bla/bla.tex")
---~ test("zip:///oeps/oeps.zip?bla/bla.tex")
+
+
+
+
+
+
end -- of closure
@@ -2767,11 +2734,6 @@ end
dir.glob = glob
---~ list = dir.glob("**/*.tif")
---~ list = dir.glob("/**/*.tif")
---~ list = dir.glob("./**/*.tif")
---~ list = dir.glob("oeps/**/*.tif")
---~ list = dir.glob("/oeps/**/*.tif")
local function globfiles(path,recurse,func,files) -- func == pattern or function
if type(func) == "string" then
@@ -2815,10 +2777,6 @@ function dir.ls(pattern)
return table.concat(glob(pattern),"\n")
end
---~ mkdirs("temp")
---~ mkdirs("a/b/c")
---~ mkdirs(".","/a/b/c")
---~ mkdirs("a","b","c")
local make_indeed = true -- false
@@ -2878,17 +2836,6 @@ if string.find(os.getenv("PATH"),";") then -- os.type == "windows"
return pth, (lfs.isdir(pth) == true)
end
---~ print(dir.mkdirs("","","a","c"))
---~ print(dir.mkdirs("a"))
---~ print(dir.mkdirs("a:"))
---~ print(dir.mkdirs("a:/b/c"))
---~ print(dir.mkdirs("a:b/c"))
---~ print(dir.mkdirs("a:/bbb/c"))
---~ print(dir.mkdirs("/a/b/c"))
---~ print(dir.mkdirs("/aaa/b/c"))
---~ print(dir.mkdirs("//a/b/c"))
---~ print(dir.mkdirs("///a/b/c"))
---~ print(dir.mkdirs("a/bbb//ccc/"))
function dir.expand_name(str) -- will be merged with cleanpath and collapsepath
local first, nothing, last = match(str,"^(//)(//*)(.*)$")
@@ -2928,7 +2875,7 @@ else
local str, pth, t = "", "", { ... }
for i=1,#t do
local s = t[i]
- if s ~= "" then
+ if s and s ~= "" then -- we catch nil and false
if str ~= "" then
str = str .. "/" .. s
else
@@ -2962,13 +2909,6 @@ else
return pth, (lfs.isdir(pth) == true)
end
---~ print(dir.mkdirs("","","a","c"))
---~ print(dir.mkdirs("a"))
---~ print(dir.mkdirs("/a/b/c"))
---~ print(dir.mkdirs("/aaa/b/c"))
---~ print(dir.mkdirs("//a/b/c"))
---~ print(dir.mkdirs("///a/b/c"))
---~ print(dir.mkdirs("a/bbb//ccc/"))
function dir.expand_name(str) -- will be merged with cleanpath and collapsepath
if not find(str,"^/") then
@@ -3025,7 +2965,7 @@ function toboolean(str,tolerant)
end
end
-function string.is_boolean(str)
+function string.is_boolean(str,default)
if type(str) == "string" then
if str == "true" or str == "yes" or str == "on" or str == "t" then
return true
@@ -3033,7 +2973,7 @@ function string.is_boolean(str)
return false
end
end
- return nil
+ return default
end
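-- A short sketch (not in the commit) of the new default argument:
--~ print(string.is_boolean("yes"))          -- true
--~ print(string.is_boolean("no"))           -- false
--~ print(string.is_boolean("maybe","huh"))  -- "huh", the fallback instead of nil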
function boolean.alwaystrue()
@@ -3049,6 +2989,211 @@ end -- of closure
do -- create closure to overcome 200 locals limit
+if not modules then modules = { } end modules ['l-unicode'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not unicode then
+
+ unicode = { utf8 = { } }
+
+ local floor, char = math.floor, string.char
+
+ function unicode.utf8.utfchar(n)
+ if n < 0x80 then
+ return char(n)
+ elseif n < 0x800 then
+ return char(0xC0 + floor(n/0x40)) .. char(0x80 + (n % 0x40))
+ elseif n < 0x10000 then
+ return char(0xE0 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40))
+ elseif n < 0x40000 then
+ return char(0xF0 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40))
+ else -- wrong:
+ -- return char(0xF1 + floor(n/0x1000000)) .. char(0x80 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40))
+ return "?"
+ end
+ end
+
+end
+
+utf = utf or unicode.utf8
+
+local concat, utfchar, utfgsub = table.concat, utf.char, utf.gsub
+local char, byte, find, bytepairs = string.char, string.byte, string.find, string.bytepairs
+
+-- 0 EF BB BF UTF-8
+-- 1 FF FE UTF-16-little-endian
+-- 2 FE FF UTF-16-big-endian
+-- 3 FF FE 00 00 UTF-32-little-endian
+-- 4 00 00 FE FF UTF-32-big-endian
+
+unicode.utfname = {
+ [0] = 'utf-8',
+ [1] = 'utf-16-le',
+ [2] = 'utf-16-be',
+ [3] = 'utf-32-le',
+ [4] = 'utf-32-be'
+}
+
+-- \000 fails in <= 5.0 but is valid in >=5.1 where %z is deprecated
+
+function unicode.utftype(f)
+ local str = f:read(4)
+ if not str then
+ f:seek('set')
+ return 0
+ -- elseif find(str,"^%z%z\254\255") then -- depricated
+ -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged
+ elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH)
+ return 4
+ -- elseif find(str,"^\255\254%z%z") then -- depricated
+ -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged
+ elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH)
+ return 3
+ elseif find(str,"^\254\255") then
+ f:seek('set',2)
+ return 2
+ elseif find(str,"^\255\254") then
+ f:seek('set',2)
+ return 1
+ elseif find(str,"^\239\187\191") then
+ f:seek('set',3)
+ return 0
+ else
+ f:seek('set')
+ return 0
+ end
+end
+
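-- A sketch (not from the commit) of the BOM sniffer; the filename is invented:
--~ local f = io.open("somefile.txt","rb")
--~ if f then
--~     print(unicode.utfname[unicode.utftype(f)]) -- e.g. "utf-16-le", the BOM is skipped
--~     f:close()
--~ end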
+function unicode.utf16_to_utf8(str, endian) -- maybe a gsub is faster or an lpeg
+ local result, tmp, n, m, p = { }, { }, 0, 0, 0
+ -- lf | cr | crlf / (cr:13, lf:10)
+ local function doit()
+ if n == 10 then
+ if p ~= 13 then
+ result[#result+1] = concat(tmp)
+ tmp = { }
+ p = 0
+ end
+ elseif n == 13 then
+ result[#result+1] = concat(tmp)
+ tmp = { }
+ p = n
+ else
+ tmp[#tmp+1] = utfchar(n)
+ p = 0
+ end
+ end
+ for l,r in bytepairs(str) do
+ if r then
+ if endian then
+ n = l*256 + r
+ else
+ n = r*256 + l
+ end
+ if m > 0 then
+ n = (m-0xD800)*0x400 + (n-0xDC00) + 0x10000
+ m = 0
+ doit()
+ elseif n >= 0xD800 and n <= 0xDBFF then
+ m = n
+ else
+ doit()
+ end
+ end
+ end
+ if #tmp > 0 then
+ result[#result+1] = concat(tmp)
+ end
+ return result
+end
+
+function unicode.utf32_to_utf8(str, endian)
+ local result = { }
+ local tmp, n, m, p = { }, 0, -1, 0
+ -- lf | cr | crlf / (cr:13, lf:10)
+ local function doit()
+ if n == 10 then
+ if p ~= 13 then
+ result[#result+1] = concat(tmp)
+ tmp = { }
+ p = 0
+ end
+ elseif n == 13 then
+ result[#result+1] = concat(tmp)
+ tmp = { }
+ p = n
+ else
+ tmp[#tmp+1] = utfchar(n)
+ p = 0
+ end
+ end
+ for a,b in bytepairs(str) do
+ if a and b then
+ if m < 0 then
+ if endian then
+ m = a*256*256*256 + b*256*256
+ else
+ m = b*256 + a
+ end
+ else
+ if endian then
+ n = m + a*256 + b
+ else
+ n = m + b*256*256*256 + a*256*256
+ end
+ m = -1
+ doit()
+ end
+ else
+ break
+ end
+ end
+ if #tmp > 0 then
+ result[#result+1] = concat(tmp)
+ end
+ return result
+end
+
+local function little(c)
+ local b = byte(c) -- b = c:byte()
+ if b < 0x10000 then
+ return char(b%256,b/256)
+ else
+ b = b - 0x10000
+ local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
+ return char(b1%256,b1/256,b2%256,b2/256)
+ end
+end
+
+local function big(c)
+ local b = byte(c)
+ if b < 0x10000 then
+ return char(b/256,b%256)
+ else
+ b = b - 0x10000
+ local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
+ return char(b1/256,b1%256,b2/256,b2%256)
+ end
+end
+
+function unicode.utf8_to_utf16(str,littleendian)
+ if littleendian then
+ return char(255,254) .. utfgsub(str,".",little)
+ else
+ return char(254,255) .. utfgsub(str,".",big)
+ end
+end
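-- A tiny sketch (not part of the commit) of the utf-16 writer above; "abc"
-- is just sample input:
--~ local s = unicode.utf8_to_utf16("abc",true) -- FF FE 61 00 62 00 63 00
--~ print(#s)                                   -- 8: the BOM plus three byte pairs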
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
if not modules then modules = { } end modules ['l-math'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -3106,7 +3251,7 @@ if not modules then modules = { } end modules ['l-utils'] = {
-- hm, quite unreadable
-local gsub = string.gsub
+local gsub, format = string.gsub, string.format
local concat = table.concat
local type, next = type, next
@@ -3114,81 +3259,79 @@ if not utils then utils = { } end
if not utils.merger then utils.merger = { } end
if not utils.lua then utils.lua = { } end
-utils.merger.m_begin = "begin library merge"
-utils.merger.m_end = "end library merge"
-utils.merger.pattern =
+utils.report = utils.report or print
+
+local merger = utils.merger
+
+merger.strip_comment = true
+
+local m_begin_merge = "begin library merge"
+local m_end_merge = "end library merge"
+local m_begin_closure = "do -- create closure to overcome 200 locals limit"
+local m_end_closure = "end -- of closure"
+
+local m_pattern =
"%c+" ..
- "%-%-%s+" .. utils.merger.m_begin ..
+ "%-%-%s+" .. m_begin_merge ..
"%c+(.-)%c+" ..
- "%-%-%s+" .. utils.merger.m_end ..
+ "%-%-%s+" .. m_end_merge ..
"%c+"
-function utils.merger._self_fake_()
- return
- "-- " .. "created merged file" .. "\n\n" ..
- "-- " .. utils.merger.m_begin .. "\n\n" ..
- "-- " .. utils.merger.m_end .. "\n\n"
-end
+local m_format =
+ "\n\n-- " .. m_begin_merge ..
+ "\n%s\n" ..
+ "-- " .. m_end_merge .. "\n\n"
-function utils.report(...)
- print(...)
+local m_faked =
+ "-- " .. "created merged file" .. "\n\n" ..
+ "-- " .. m_begin_merge .. "\n\n" ..
+ "-- " .. m_end_merge .. "\n\n"
+
+local function self_fake()
+ return m_faked
end
-utils.merger.strip_comment = true
+local function self_nothing()
+ return ""
+end
-function utils.merger._self_load_(name)
- local f, data = io.open(name), ""
- if f then
- utils.report("reading merge from %s",name)
- data = f:read("*all")
- f:close()
+local function self_load(name)
+ local data = io.loaddata(name) or ""
+ if data == "" then
+ utils.report("merge: unknown file %s",name)
else
- utils.report("unknown file to merge %s",name)
- end
- if data and utils.merger.strip_comment then
- -- saves some 20K
- data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "")
+ utils.report("merge: inserting %s",name)
end
return data or ""
end
-function utils.merger._self_save_(name, data)
+local function self_save(name, data)
if data ~= "" then
- local f = io.open(name,'w')
- if f then
- utils.report("saving merge from %s",name)
- f:write(data)
- f:close()
+ if merger.strip_comment then
+ -- saves some 20K
+ local n = #data
+ data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
+ utils.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data)
end
+ io.savedata(name,data)
+ utils.report("merge: saving %s",name)
end
end
-function utils.merger._self_swap_(data,code)
- if data ~= "" then
- return (gsub(data,utils.merger.pattern, function(s)
- return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end .. "\n\n"
- end, 1))
- else
- return ""
- end
+local function self_swap(data,code)
+ return data ~= "" and (gsub(data,m_pattern, function() return format(m_format,code) end, 1)) or ""
end
---~ stripper:
---~
---~ data = gsub(data,"%-%-~[^\n]*\n","")
---~ data = gsub(data,"\n\n+","\n")
-
-function utils.merger._self_libs_(libs,list)
- local result, f, frozen = { }, nil, false
+local function self_libs(libs,list)
+ local result, f, frozen, foundpath = { }, nil, false, nil
result[#result+1] = "\n"
if type(libs) == 'string' then libs = { libs } end
if type(list) == 'string' then list = { list } end
- local foundpath = nil
for i=1,#libs do
local lib = libs[i]
for j=1,#list do
local pth = gsub(list[j],"\\","/") -- file.clean_path
- utils.report("checking library path %s",pth)
+ utils.report("merge: checking library path %s",pth)
local name = pth .. "/" .. lib
if lfs.isfile(name) then
foundpath = pth
@@ -3197,76 +3340,58 @@ function utils.merger._self_libs_(libs,list)
if foundpath then break end
end
if foundpath then
- utils.report("using library path %s",foundpath)
+ utils.report("merge: using library path %s",foundpath)
local right, wrong = { }, { }
for i=1,#libs do
local lib = libs[i]
local fullname = foundpath .. "/" .. lib
if lfs.isfile(fullname) then
- -- right[#right+1] = lib
- utils.report("merging library %s",fullname)
- result[#result+1] = "do -- create closure to overcome 200 locals limit"
+ utils.report("merge: using library %s",fullname)
+ right[#right+1] = lib
+ result[#result+1] = m_begin_closure
result[#result+1] = io.loaddata(fullname,true)
- result[#result+1] = "end -- of closure"
+ result[#result+1] = m_end_closure
else
- -- wrong[#wrong+1] = lib
- utils.report("no library %s",fullname)
+ utils.report("merge: skipping library %s",fullname)
+ wrong[#wrong+1] = lib
end
end
if #right > 0 then
- utils.report("merged libraries: %s",concat(right," "))
+ utils.report("merge: used libraries: %s",concat(right," "))
end
if #wrong > 0 then
- utils.report("skipped libraries: %s",concat(wrong," "))
+ utils.report("merge: skipped libraries: %s",concat(wrong," "))
end
else
- utils.report("no valid library path found")
+ utils.report("merge: no valid library path found")
end
return concat(result, "\n\n")
end
-function utils.merger.selfcreate(libs,list,target)
+function merger.selfcreate(libs,list,target)
if target then
- utils.merger._self_save_(
- target,
- utils.merger._self_swap_(
- utils.merger._self_fake_(),
- utils.merger._self_libs_(libs,list)
- )
- )
- end
-end
-
-function utils.merger.selfmerge(name,libs,list,target)
- utils.merger._self_save_(
- target or name,
- utils.merger._self_swap_(
- utils.merger._self_load_(name),
- utils.merger._self_libs_(libs,list)
- )
- )
+ self_save(target,self_swap(self_fake(),self_libs(libs,list)))
+ end
end
-function utils.merger.selfclean(name)
- utils.merger._self_save_(
- name,
- utils.merger._self_swap_(
- utils.merger._self_load_(name),
- ""
- )
- )
+function merger.selfmerge(name,libs,list,target)
+ self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
end
-function utils.lua.compile(luafile, lucfile, cleanup, strip) -- defaults: cleanup=false strip=true
- -- utils.report("compiling",luafile,"into",lucfile)
+function merger.selfclean(name)
+ self_save(name,self_swap(self_load(name),self_nothing()))
+end
+
+function utils.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+ utils.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quote(lucfile) .. " " .. string.quote(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = (os.spawn("texluac " .. command) == 0) or (os.spawn("luac " .. command) == 0)
+ local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- -- utils.report("removing",luafile)
+ utils.report("lua: removing %s",luafile)
os.remove(luafile)
end
return done
@@ -3350,11 +3475,7 @@ end
function aux.settings_to_hash(str,existing)
if str and str ~= "" then
hash = existing or { }
- if moretolerant then
- lpegmatch(pattern_b_s,str)
- else
- lpegmatch(pattern_a_s,str)
- end
+ lpegmatch(pattern_a_s,str)
return hash
else
return { }
@@ -3484,12 +3605,6 @@ local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
local number = digit^1 * (case_1 + case_2)
local stripper = lpeg.Cs((number + 1)^0)
---~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100"
---~ collectgarbage("collect")
---~ str = string.rep(sample,10000)
---~ local ts = os.clock()
---~ lpegmatch(stripper,str)
---~ print(#str, os.clock()-ts, lpegmatch(stripper,sample))
lpeg.patterns.strip_zeros = stripper
@@ -3518,235 +3633,305 @@ function aux.accesstable(target)
return t
end
---~ function string.commaseparated(str)
---~ return gmatch(str,"([^,%s]+)")
---~ end
-- as we use this a lot ...
---~ function aux.cachefunction(action,weak)
---~ local cache = { }
---~ if weak then
---~ setmetatable(cache, { __mode = "kv" } )
---~ end
---~ local function reminder(str)
---~ local found = cache[str]
---~ if not found then
---~ found = action(str)
---~ cache[str] = found
---~ end
---~ return found
---~ end
---~ return reminder, cache
---~ end
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-tra'] = {
+if not modules then modules = { } end modules ['trac-inf'] = {
version = 1.001,
- comment = "companion to trac-tra.mkiv",
+ comment = "companion to trac-inf.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
--- the <anonymous> tag is kind of generic and used for functions that are not
--- bound to a variable, like node.new, node.copy etc (contrary to for instance
--- node.has_attribute which is bound to a has_attribute local variable in mkiv)
+-- As we want to protect the global tables, we no longer store the timing
+-- in the tables themselves but in a hidden timers table so that we don't
+-- get warnings about assignments. This is more efficient than using rawset
+-- and rawget.
-local debug = require "debug"
+local format = string.format
+local clock = os.gettimeofday or os.clock -- should go in environment
-local getinfo = debug.getinfo
-local type, next = type, next
-local concat = table.concat
-local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub
+local statusinfo, n, registered = { }, 0, { }
-debugger = debugger or { }
+statistics = statistics or { }
-local counters = { }
-local names = { }
+statistics.enable = true
+statistics.threshold = 0.05
--- one
+local timers = { }
-local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
- if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
+local function hastiming(instance)
+ return instance and timers[instance]
end
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
- else
- -- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+
+local function resettiming(instance)
+ timers[instance or "notimer"] = { timing = 0, loadtime = 0 }
+end
+
+local function starttiming(instance)
+ local timer = timers[instance or "notimer"]
+ if not timer then
+ timer = { }
+ timers[instance or "notimer"] = timer
+ end
+ local it = timer.timing
+ if not it then
+ it = 0
+ end
+ if it == 0 then
+ timer.starttime = clock()
+ if not timer.loadtime then
+ timer.loadtime = 0
end
- else
- return "unknown"
end
+ timer.timing = it + 1
end
-function debugger.showstats(printer,threshold)
- printer = printer or texio.write or print
- threshold = threshold or 0
- local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
+
+local function stoptiming(instance, report)
+ local timer = timers[instance or "notimer"]
+ local it = timer.timing
+ if it > 1 then
+ timer.timing = it - 1
+ else
+ local starttime = timer.starttime
+ if starttime then
+ local stoptime = clock()
+ local loadtime = stoptime - starttime
+ timer.stoptime = stoptime
+ timer.loadtime = timer.loadtime + loadtime
+ if report then
+ statistics.report("load time %0.3f",loadtime)
end
+ timer.timing = 0
+ return loadtime
end
- grandtotal = grandtotal + count
- functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ return 0
end
--- two
-
---~ local function hook()
---~ local n = getinfo(2)
---~ if n.what=="C" and not n.name then
---~ local f = tostring(debug.traceback())
---~ local cf = counters[f]
---~ if cf == nil then
---~ counters[f] = 1
---~ names[f] = n
---~ else
---~ counters[f] = cf + 1
---~ end
---~ end
---~ end
---~ function debugger.showstats(printer,threshold)
---~ printer = printer or texio.write or print
---~ threshold = threshold or 0
---~ local total, grandtotal, functions = 0, 0, 0
---~ printer("\n") -- ugly but ok
---~ -- table.sort(counters)
---~ for func, count in next, counters do
---~ if count > threshold then
---~ printer(format("%8i %s", count, func))
---~ total = total + count
---~ end
---~ grandtotal = grandtotal + count
---~ functions = functions + 1
---~ end
---~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
---~ end
+local function elapsedtime(instance)
+ local timer = timers[instance or "notimer"]
+ return format("%0.3f",timer and timer.loadtime or 0)
+end
--- rest
+local function elapsedindeed(instance)
+ local timer = timers[instance or "notimer"]
+ return (timer and timer.loadtime or 0) > statistics.threshold
+end
-function debugger.savestats(filename,threshold)
- local f = io.open(filename,'w')
- if f then
- debugger.showstats(function(str) f:write(str) end,threshold)
- f:close()
+local function elapsedseconds(instance,rest) -- returns nil if 0 seconds
+ if elapsedindeed(instance) then
+ return format("%s seconds %s", elapsedtime(instance),rest or "")
end
end
-function debugger.enable()
- debug.sethook(hook,"c")
+statistics.hastiming = hastiming
+statistics.resettiming = resettiming
+statistics.starttiming = starttiming
+statistics.stoptiming = stoptiming
+statistics.elapsedtime = elapsedtime
+statistics.elapsedindeed = elapsedindeed
+statistics.elapsedseconds = elapsedseconds
+
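-- A usage sketch (not from the commit); "myphase" is an arbitrary key:
--~ statistics.starttiming("myphase")
--~ -- ... some work ...
--~ statistics.stoptiming("myphase")
--~ print(statistics.elapsedtime("myphase"))    -- e.g. "0.012"
--~ print(statistics.elapsedseconds("myphase")) -- nil when below statistics.threshold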
+-- general function
+
+function statistics.register(tag,fnc)
+ if statistics.enable and type(fnc) == "function" then
+ local rt = registered[tag] or (#statusinfo + 1)
+ statusinfo[rt] = { tag, fnc }
+ registered[tag] = rt
+ if #tag > n then n = #tag end
+ end
end
-function debugger.disable()
- debug.sethook()
---~ counters[debug.getinfo(2,"f").func] = nil
+function statistics.show(reporter)
+ if statistics.enable then
+ if not reporter then reporter = function(tag,data,n) texio.write_nl(tag .. " " .. data) end end
+ -- this code will move
+ local register = statistics.register
+ register("luatex banner", function()
+ return string.lower(status.banner)
+ end)
+ register("control sequences", function()
+ return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ end)
+ register("callbacks", function()
+ local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
+ return format("direct: %s, indirect: %s, total: %s", total-indirect, indirect, total)
+ end)
+ register("current memory usage", statistics.memused)
+ register("runtime",statistics.runtime)
+ for i=1,#statusinfo do
+ local s = statusinfo[i]
+ local r = s[2]()
+ if r then
+ reporter(s[1],r,n)
+ end
+ end
+ texio.write_nl("") -- final newline
+ statistics.enable = false
+ end
end
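-- Registering an extra line for the job statistics (a sketch, not from the
-- commit; the tag and message are invented):
--~ statistics.register("example status", function()
--~     return string.format("%s entries cached",1234)
--~ end)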
-function debugger.tracing()
- local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0
- if n > 0 then
- function debugger.tracing() return true end ; return true
+function statistics.show_job_stat(tag,data,n)
+ if type(data) == "table" then
+ for i=1,#data do
+ statistics.show_job_stat(tag,data[i],n)
+ end
else
- function debugger.tracing() return false end ; return false
+ texio.write_nl(format("%-15s: %s - %s","mkiv lua stats",tag:rpadd(n," "),data))
end
end
---~ debugger.enable()
+function statistics.memused() -- no math.round yet -)
+ local round = math.round or math.floor
+ return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
+end
+
+starttiming(statistics)
+
+function statistics.formatruntime(runtime) -- indirect so it can be overloaded and
+ return format("%s seconds", runtime) -- indeed that happens in cure-uti.lua
+end
+
+function statistics.runtime()
+ stoptiming(statistics)
+ return statistics.formatruntime(elapsedtime(statistics))
+end
+
+function statistics.timed(action,report)
+ report = report or logs.simple
+ starttiming("run")
+ action()
+ stoptiming("run")
+ report("total runtime: %s",elapsedtime("run"))
+end
+
+-- where, not really the best spot for this:
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
+commands = commands or { }
+
+function commands.resettimer(name)
+ resettiming(name or "whatever")
+ starttiming(name or "whatever")
+end
---~ debugger.disable()
+function commands.elapsedtime(name)
+ stoptiming(name or "whatever")
+ tex.sprint(elapsedtime(name or "whatever"))
+end
---~ print("")
---~ debugger.showstats()
---~ print("")
---~ debugger.showstats(print,3)
-setters = setters or { }
-setters.data = setters.data or { }
+end -- of closure
---~ local function set(t,what,value)
---~ local data, done = t.data, t.done
---~ if type(what) == "string" then
---~ what = aux.settings_to_array(what) -- inefficient but ok
---~ end
---~ for i=1,#what do
---~ local w = what[i]
---~ for d, f in next, data do
---~ if done[d] then
---~ -- prevent recursion due to wildcards
---~ elseif find(d,w) then
---~ done[d] = true
---~ for i=1,#f do
---~ f[i](value)
---~ end
---~ end
---~ end
---~ end
---~ end
+do -- create closure to overcome 200 locals limit
-local function set(t,what,value)
+if not modules then modules = { } end modules ['trac-set'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, next, tostring = type, next, tostring
+local concat = table.concat
+local format, find, lower, gsub = string.format, string.find, string.lower, string.gsub
+local is_boolean = string.is_boolean
+
+setters = { }
+
+local data = { } -- maybe just local
+
+-- We can initialize from the cnf file. This is sort of tricky as
+-- laster defined setters also need to be initialized then. If set
+-- this way, we need to ensure that they are not reset later on.
+
+local trace_initialize = false
+
+local function report(what,filename,name,key,value)
+ texio.write_nl(format("%s setter, filename: %s, name: %s, key: %s, value: %s",what,filename,name,key,value))
+end
+
+function setters.initialize(filename,name,values) -- filename only for diagnostics
+ local data = data[name]
+ if data then
+ data = data.data
+ if data then
+ for key, value in next, values do
+ key = gsub(key,"_",".")
+ value = is_boolean(value,value)
+ local functions = data[key]
+ if functions then
+ if #functions > 0 and not functions.value then
+ if trace_initialize then
+ report("doing",filename,name,key,value)
+ end
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value = value
+ else
+ if trace_initialize then
+ report("skipping",filename,name,key,value)
+ end
+ end
+ else
+ -- we do a simple preregistration i.e. not in the
+ -- list as it might be an obsolete entry
+ functions = { default = value }
+ data[key] = functions
+ if trace_initialize then
+ report("storing",filename,name,key,value)
+ end
+ end
+ end
+ end
+ end
+end
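-- How a cnf-style table could feed a setter group (a sketch, not from the
-- commit; it assumes a group named "directives" exists, the file name and key
-- are invented, underscores become dots and strings become booleans):
--~ setters.initialize("texmfcnf.lua","directives",{
--~     system_nostatistics = "yes", -- stored as key "system.nostatistics", value true
--~ })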
+
+-- user interface code
+
+local function set(t,what,newvalue)
local data, done = t.data, t.done
if type(what) == "string" then
what = aux.settings_to_hash(what) -- inefficient but ok
end
- for w, v in next, what do
- if v == "" then
- v = value
+ for w, value in next, what do
+ if value == "" then
+ value = newvalue
+ elseif not value then
+ value = false -- catch nil
else
- v = toboolean(v)
+ value = is_boolean(value,value)
end
- for d, f in next, data do
- if done[d] then
+ for name, functions in next, data do
+ if done[name] then
-- prevent recursion due to wildcards
- elseif find(d,w) then
- done[d] = true
- for i=1,#f do
- f[i](v)
+ elseif find(name,w) then
+ done[name] = true
+ for i=1,#functions do
+ functions[i](value)
end
+ functions.value = value
end
end
end
end
local function reset(t)
- for d, f in next, t.data do
- for i=1,#f do
- f[i](false)
+ for name, functions in next, t.data do
+ for i=1,#functions do
+ functions[i](false)
end
+ functions.value = false
end
end
@@ -3767,17 +3952,26 @@ end
function setters.register(t,what,...)
local data = t.data
what = lower(what)
- local w = data[what]
- if not w then
- w = { }
- data[what] = w
+ local functions = data[what]
+ if not functions then
+ functions = { }
+ data[what] = functions
end
+ local default = functions.default -- can be set from cnf file
for _, fnc in next, { ... } do
local typ = type(fnc)
- if typ == "function" then
- w[#w+1] = fnc
- elseif typ == "string" then
- w[#w+1] = function(value) set(t,fnc,value,nesting) end
+ if typ == "string" then
+ local s = fnc -- else wrong reference
+ fnc = function(value) set(t,s,value) end
+ elseif typ ~= "function" then
+ fnc = nil
+ end
+ if fnc then
+ functions[#functions+1] = fnc
+ if default then
+ fnc(default)
+ functions.value = default
+ end
end
end
end
@@ -3818,8 +4012,16 @@ end
function setters.show(t)
commands.writestatus("","")
local list = setters.list(t)
+ local category = t.name
for k=1,#list do
- commands.writestatus(t.name,list[k])
+ local name = list[k]
+ local functions = t.data[name]
+ if functions then
+ local value, default, modules = functions.value, functions.default, #functions
+ value = value == nil and "unset" or tostring(value)
+ default = default == nil and "unset" or tostring(default)
+ commands.writestatus(category,format("%-25s modules: %2i default: %5s value: %5s",name,modules,default,value))
+ end
end
commands.writestatus("","")
end
@@ -3832,7 +4034,7 @@ end
function setters.new(name)
local t
t = {
- data = { },
+ data = { }, -- indexed, but also default and value fields
name = name,
enable = function(...) setters.enable (t,...) end,
disable = function(...) setters.disable (t,...) end,
@@ -3840,7 +4042,7 @@ function setters.new(name)
list = function(...) setters.list (t,...) end,
show = function(...) setters.show (t,...) end,
}
- setters.data[name] = t
+ data[name] = t
return t
end
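-- Wiring a trace flag to a directive (a sketch, not from the commit; the key
-- "mymodule.trace" is invented):
--~ local trace_mine = false
--~ directives.register("mymodule.trace", function(v) trace_mine = v end)
--~ directives.enable("mymodule.trace") -- trace_mine is now true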
@@ -3858,12 +4060,12 @@ local e = directives.enable
local d = directives.disable
function directives.enable(...)
- commands.writestatus("directives","enabling: %s",concat({...}," "))
+ (commands.writestatus or logs.report)("directives","enabling: %s",concat({...}," "))
e(...)
end
function directives.disable(...)
- commands.writestatus("directives","disabling: %s",concat({...}," "))
+ (commands.writestatus or logs.report)("directives","disabling: %s",concat({...}," "))
d(...)
end
@@ -3871,12 +4073,12 @@ local e = experiments.enable
local d = experiments.disable
function experiments.enable(...)
- commands.writestatus("experiments","enabling: %s",concat({...}," "))
+ (commands.writestatus or logs.report)("experiments","enabling: %s",concat({...}," "))
e(...)
end
function experiments.disable(...)
- commands.writestatus("experiments","disabling: %s",concat({...}," "))
+ (commands.writestatus or logs.report)("experiments","disabling: %s",concat({...}," "))
d(...)
end
@@ -3887,6 +4089,946 @@ directives.register("system.nostatistics", function(v)
end)
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-tra'] = {
+ version = 1.001,
+ comment = "companion to trac-tra.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- the <anonymous> tag is kind of generic and used for functions that are not
+-- bound to a variable, like node.new, node.copy etc (contrary to for instance
+-- node.has_attribute which is bound to a has_attribute local variable in mkiv)
+
+local debug = require "debug"
+
+local getinfo = debug.getinfo
+local type, next = type, next
+local format, find = string.format, string.find
+local is_boolean = string.is_boolean
+
+debugger = debugger or { }
+
+local counters = { }
+local names = { }
+
+-- one
+
+local function hook()
+ local f = getinfo(2,"f").func
+ local n = getinfo(2,"Sn")
+-- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ if f then
+ local cf = counters[f]
+ if cf == nil then
+ counters[f] = 1
+ names[f] = n
+ else
+ counters[f] = cf + 1
+ end
+ end
+end
+
+local function getname(func)
+ local n = names[func]
+ if n then
+ if n.what == "C" then
+ return n.name or '<anonymous>'
+ else
+ -- source short_src linedefined what name namewhat nups func
+ local name = n.name or n.namewhat or n.what
+ if not name or name == "" then name = "?" end
+ return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ end
+ else
+ return "unknown"
+ end
+end
+
+function debugger.showstats(printer,threshold)
+ printer = printer or texio.write or print
+ threshold = threshold or 0
+ local total, grandtotal, functions = 0, 0, 0
+ printer("\n") -- ugly but ok
+ -- table.sort(counters)
+ for func, count in next, counters do
+ if count > threshold then
+ local name = getname(func)
+ if not find(name,"for generator") then
+ printer(format("%8i %s", count, name))
+ total = total + count
+ end
+ end
+ grandtotal = grandtotal + count
+ functions = functions + 1
+ end
+ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+end
+
+-- two
+
+
+-- rest
+
+function debugger.savestats(filename,threshold)
+ local f = io.open(filename,'w')
+ if f then
+ debugger.showstats(function(str) f:write(str) end,threshold)
+ f:close()
+ end
+end
+
+function debugger.enable()
+ debug.sethook(hook,"c")
+end
+
+function debugger.disable()
+ debug.sethook()
+end
+
+local function trace_calls(n)
+ debugger.enable()
+ luatex.register_stop_actions(function()
+ debugger.disable()
+ debugger.savestats(tex.jobname .. "-luacalls.log",tonumber(n))
+ end)
+ trace_calls = function() end
+end
+
+if directives then
+ directives.register("system.tracecalls", function(n) trace_calls(n) end) -- indirect is needed for nilling
+end
+
+
+
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-log'] = {
+ version = 1.001,
+ comment = "companion to trac-log.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- xml logging is only useful in normal runs, not in ini mode
+-- it looks like some tex logging (like filenames) is broken (no longer
+-- intercepted at the tex end, so the xml variant is not that usable now)
+
+
+local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
+local format, gmatch = string.format, string.gmatch
+local texcount = tex and tex.count
+
+--[[ldx--
+<p>This is a prelude to a more extensive logging module. For the sake
+of parsing log files, in addition to the standard logging we will
+provide an <l n='xml'/> structured file. Actually, any logging that
+is hooked into callbacks will be \XML\ by default.</p>
+--ldx]]--
+
+logs = logs or { }
+
+--[[ldx--
+<p>This looks pretty ugly but we need to speed things up a bit.</p>
+--ldx]]--
+
+local moreinfo = [[
+More information about ConTeXt and the tools that come with it can be found at:
+
+maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
+wiki : http://contextgarden.net
+]]
+
+local functions = {
+ 'report', 'status', 'start', 'stop', 'push', 'pop', 'line', 'direct',
+ 'start_run', 'stop_run',
+ 'start_page_number', 'stop_page_number',
+ 'report_output_pages', 'report_output_log',
+ 'report_tex_stat', 'report_job_stat',
+ 'show_open', 'show_close', 'show_load',
+ 'dummy',
+}
+
+local method = "nop"
+
+function logs.set_method(newmethod)
+ method = newmethod
+ -- a direct copy might be faster but let's try this for a while
+ setmetatable(logs, { __index = logs[method] })
+end
+
+function logs.get_method()
+ return method
+end
+
+-- installer
+
+local data = { }
+
+function logs.new(category)
+ local logger = data[category]
+ if not logger then
+ logger = function(...)
+ logs.report(category,...)
+ end
+ data[category] = logger
+ end
+ return logger
+end
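+-- Usage sketch: a category logger is just a cached closure around logs.report,
+-- so (with a hypothetical category and message) the following two calls are
+-- equivalent:
+--
+-- local report_fonts = logs.new("fonts")
+-- report_fonts("loading %s","somefont")   -- same as logs.report("fonts","loading %s","somefont")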
+
+
+
+-- nop logging (maybe use __call instead)
+
+local noplog = { } logs.nop = noplog setmetatable(logs, { __index = noplog })
+
+for i=1,#functions do
+ noplog[functions[i]] = function() end
+end
+
+-- tex logging
+
+local texlog = { } logs.tex = texlog setmetatable(texlog, { __index = noplog })
+
+function texlog.report(a,b,c,...)
+ if c then
+ write_nl(format("%-16s> %s\n",a,format(b,c,...)))
+ elseif b then
+ write_nl(format("%-16s> %s\n",a,b))
+ else
+ write_nl(format("%-16s>\n",a))
+ end
+end
+
+function texlog.status(a,b,c,...)
+ if c then
+ write_nl(format("%-16s: %s\n",a,format(b,c,...)))
+ elseif b then
+ write_nl(format("%-16s: %s\n",a,b)) -- b can have %'s
+ else
+ write_nl(format("%-16s:>\n",a))
+ end
+end
+
+function texlog.line(fmt,...) -- new
+ if fmt then
+ write_nl(format(fmt,...))
+ else
+ write_nl("")
+ end
+end
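+-- Sketch of the resulting log lines (hypothetical category and message); the
+-- category is padded to 16 characters, followed by ">" for reports and ":"
+-- for status lines:
+--
+-- texlog.report("fonts","loading %s","somefont")   -- "fonts           > loading somefont"
+-- texlog.status("fonts","ready")                   -- "fonts           : ready"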
+
+local real, user, sub
+
+function texlog.start_page_number()
+ real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+end
+
+local report_pages = logs.new("pages") -- not needed but saves checking when we grep for it
+
+function texlog.stop_page_number()
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
+ io.flush()
+end
+
+texlog.report_job_stat = statistics and statistics.show_job_stat
+
+-- xml logging
+
+local xmllog = { } logs.xml = xmllog setmetatable(xmllog, { __index = noplog })
+
+function xmllog.report(category,fmt,s,...) -- new
+ if s then
+ write_nl(format("<r category='%s'>%s</r>",category,format(fmt,s,...)))
+ elseif fmt then
+ write_nl(format("<r category='%s'>%s</r>",category,fmt))
+ else
+ write_nl(format("<r category='%s'/>",category))
+ end
+end
+
+function xmllog.status(category,fmt,s,...)
+ if s then
+ write_nl(format("<s category='%s'>%s</r>",category,format(fmt,s,...)))
+ elseif fmt then
+ write_nl(format("<s category='%s'>%s</r>",category,fmt))
+ else
+ write_nl(format("<s category='%s'/>",category))
+ end
+end
+
+function xmllog.line(fmt,...) -- new
+ if fmt then
+ write_nl(format("<r>%s</r>",format(fmt,...)))
+ else
+ write_nl("<r/>")
+ end
+end
+
+function xmllog.start() write_nl("<%s>" ) end
+function xmllog.stop () write_nl("</%s>") end
+function xmllog.push () write_nl("<!-- ") end
+function xmllog.pop () write_nl(" -->" ) end
+
+function xmllog.start_run()
+ write_nl("<?xml version='1.0' standalone='yes'?>")
+ write_nl("<job>") -- xmlns='www.pragma-ade.com/luatex/schemas/context-job.rng'
+ write_nl("")
+end
+
+function xmllog.stop_run()
+ write_nl("</job>")
+end
+
+function xmllog.start_page_number()
+ write_nl(format("<p real='%s' page='%s' sub='%s'", texcount.realpageno, texcount.userpageno, texcount.subpageno))
+end
+
+function xmllog.stop_page_number()
+ write("/>")
+ write_nl("")
+end
+
+function xmllog.report_output_pages(p,b)
+ write_nl(format("<v k='pages' v='%s'/>", p))
+ write_nl(format("<v k='bytes' v='%s'/>", b))
+ write_nl("")
+end
+
+function xmllog.report_output_log()
+ -- nothing
+end
+
+function xmllog.report_tex_stat(k,v)
+ write_nl("log","<v k='"..k.."'>"..tostring(v).."</v>")
+end
+
+local nesting = 0
+
+function xmllog.show_open(name)
+ nesting = nesting + 1
+ write_nl(format("<f l='%s' n='%s'>",nesting,name))
+end
+
+function xmllog.show_close(name)
+ write("</f> ")
+ nesting = nesting - 1
+end
+
+function xmllog.show_load(name)
+ write_nl(format("<f l='%s' n='%s'/>",nesting+1,name))
+end
+
+-- initialization
+
+if tex and (tex.jobname or tex.formatname) then
+ -- todo: this can be set in mtxrun ... or maybe we should just forget about this alternative format
+ if (os.getenv("mtx.directives.logmethod") or os.getenv("mtx_directives_logmethod")) == "xml" then
+ logs.set_method('xml')
+ else
+ logs.set_method('tex')
+ end
+else
+ logs.set_method('nop')
+end
+
+-- logging in runners -> these are actually the nop loggers
+
+local name, banner = 'report', 'context'
+
+function noplog.report(category,fmt,...) -- todo: fmt,s
+ if fmt then
+ write_nl(format("%s | %s: %s",name,category,format(fmt,...)))
+ elseif category then
+ write_nl(format("%s | %s",name,category))
+ else
+ write_nl(format("%s |",name))
+ end
+end
+
+noplog.status = noplog.report -- just to be sure, never used
+
+function noplog.simple(fmt,...) -- todo: fmt,s
+ if fmt then
+ write_nl(format("%s | %s",name,format(fmt,...)))
+ else
+ write_nl(format("%s |",name))
+ end
+end
+
+if utils then
+ utils.report = function(...) logs.simple(...) end
+end
+
+function logs.setprogram(newname,newbanner)
+ name, banner = newname, newbanner
+end
+
+function logs.extendbanner(newbanner)
+ banner = banner .. " | ".. newbanner
+end
+
+function logs.reportlines(str) -- todo: <lines></lines>
+ for line in gmatch(str,"(.-)[\n\r]") do
+ logs.report(line)
+ end
+end
+
+function logs.reportline() -- for scripts too
+ logs.report()
+end
+
+function logs.simpleline()
+ logs.report()
+end
+
+function logs.simplelines(str) -- todo: <lines></lines>
+ for line in gmatch(str,"(.-)[\n\r]") do
+ logs.simple(line)
+ end
+end
+
+function logs.reportbanner() -- for scripts too
+ logs.report(banner)
+end
+
+function logs.help(message,option)
+ logs.reportbanner()
+ logs.reportline()
+ logs.reportlines(message)
+ if option ~= "nomoreinfo" then
+ logs.reportline()
+ logs.reportlines(moreinfo)
+ end
+end
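+-- Typical runner usage (sketch, hypothetical program name and help text): the
+-- help output is framed by the name and banner set via logs.setprogram:
+--
+-- logs.setprogram("MTX Example","a made-up banner")
+-- logs.help([[
+-- --run        process the given file
+-- --verbose    report more details
+-- ]],"nomoreinfo")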
+
+-- logging to a file
+
+
+function logs.system(whereto,process,jobname,category,...)
+ local message = format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
+ for i=1,10 do
+ local f = io.open(whereto,"a")
+ if f then
+ f:write(message)
+ f:close()
+ break
+ else
+ sleep(0.1)
+ end
+ end
+end
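+-- Sketch (hypothetical call, names made up): append one timestamped line to a
+-- shared log file; the retry loop copes with the file being locked by a
+-- concurrent run:
+--
+-- logs.system("jobs.log","luatex",tex.jobname,"files","loaded %s","somefile.tex")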
+
+-- bonus
+
+function logs.fatal(where,...)
+ logs.report(where,"fatal error: %s, aborting now",format(...))
+ os.exit()
+end
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-pro'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type
+
+-- The protection implemented here is probably not that tight but good enough to catch
+-- problems due to naive usage.
+--
+-- There's a more extensive version (trac-xxx.lua) that supports nesting.
+--
+-- This will change when we have _ENV in lua 5.2+
+
+local trace_namespaces = false trackers.register("system.namespaces", function(v) trace_namespaces = v end)
+
+local report_system = logs.new("system")
+
+namespaces = { }
+
+local registered = { }
+
+local function report_index(k,name)
+ if trace_namespaces then
+ report_system("reference to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
+ else
+ report_system("reference to '%s' in protected namespace '%s'",k,name)
+ end
+end
+
+local function report_newindex(k,name)
+ if trace_namespaces then
+ report_system("assignment to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
+ else
+ report_system("assignment to '%s' in protected namespace '%s'",k,name)
+ end
+end
+
+local function register(name)
+ local data = name == "global" and _G or _G[name]
+ if not data then
+ return -- error
+ end
+ registered[name] = data
+ local m = getmetatable(data)
+ if not m then
+ m = { }
+ setmetatable(data,m)
+ end
+ local index, newindex = { }, { }
+ m.__saved__index = m.__index
+ m.__no__index = function(t,k)
+ if not index[k] then
+ index[k] = true
+ report_index(k,name)
+ end
+ return nil
+ end
+ m.__saved__newindex = m.__newindex
+ m.__no__newindex = function(t,k,v)
+ if not newindex[k] then
+ newindex[k] = true
+ report_newindex(k,name)
+ end
+ rawset(t,k,v)
+ end
+ m.__protection__depth = 0
+end
+
+local function private(name) -- maybe save name
+ local data = registered[name]
+ if not data then
+ data = _G[name]
+ if not data then
+ data = { }
+ _G[name] = data
+ end
+ register(name)
+ end
+ return data
+end
+
+local function protect(name)
+ local data = registered[name]
+ if not data then
+ return
+ end
+ local m = getmetatable(data)
+ local pd = m.__protection__depth
+ if pd > 0 then
+ m.__protection__depth = pd + 1
+ else
+ m.__saved__index, m.__saved__newindex = m.__index, m.__newindex
+ m.__index, m.__newindex = m.__no__index, m.__no__newindex
+ m.__protection__depth = 1
+ end
+end
+
+local function unprotect(name)
+ local data = registered[name]
+ if not data then
+ return
+ end
+ local m = getmetatable(data)
+ local pd = m.__protection__depth
+ if pd > 1 then
+ m.__protection__depth = pd - 1
+ else
+ m.__index, m.__newindex = m.__saved__index, m.__saved__newindex
+ m.__protection__depth = 0
+ end
+end
+
+local function protectall()
+ for name, _ in next, registered do
+ if name ~= "global" then
+ protect(name)
+ end
+ end
+end
+
+local function unprotectall()
+ for name, _ in next, registered do
+ if name ~= "global" then
+ unprotect(name)
+ end
+ end
+end
+
+namespaces.register = register -- register when defined
+namespaces.private = private -- allocate and register if needed
+namespaces.protect = protect
+namespaces.unprotect = unprotect
+namespaces.protectall = protectall
+namespaces.unprotectall = unprotectall
+
+namespaces.private("namespaces") registered = { } register("global") -- unreachable
+
+directives.register("system.protect", function(v)
+ if v then
+ protectall()
+ else
+ unprotectall()
+ end
+end)
+
+directives.register("system.checkglobals", function(v)
+ if v then
+ report_system("enabling global namespace guard")
+ protect("global")
+ else
+ report_system("disabling global namespace guard")
+ unprotect("global")
+ end
+end)
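+-- Usage sketch (hypothetical namespace 'thing'): once registered and protected,
+-- unknown reads and writes are reported instead of passing silently; writes
+-- still happen via rawset:
+--
+-- thing = { value = 1 }
+-- namespaces.register("thing")
+-- namespaces.protect("thing")
+-- print(thing.missing)   -- reports: reference to 'missing' in protected namespace 'thing'
+-- thing.extra = 2        -- reports the assignment but still sets the field
+-- namespaces.unprotect("thing")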
+
+-- dummy section (will go to luat-dum.lua)
+
+
+
+
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['luat-env'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- A former version provided functionality for non-embedded core
+-- scripts i.e. runtime library loading. Given the amount of
+-- Lua code we use now, this no longer makes sense. Much of this
+-- evolved before bytecode arrays were available and so a lot of
+-- code has disappeared already.
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
+local unquote, quote = string.unquote, string.quote
+
+-- precautions
+
+os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+
+function os.setlocale()
+ -- no way you can mess with it
+end
+
+-- dirty tricks
+
+if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
+ arg[-1] = arg[0]
+ arg[ 0] = arg[2]
+ for k=3,#arg do
+ arg[k-2] = arg[k]
+ end
+ arg[#arg] = nil -- last
+ arg[#arg] = nil -- pre-last
+end
+
+-- environment
+
+environment = environment or { }
+environment.arguments = { }
+environment.files = { }
+environment.sortedflags = nil
+
+local mt = {
+ __index = function(_,k)
+ if k == "version" then
+ local version = tex.toks and tex.toks.contextversiontoks
+ if version and version ~= "" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k == "jobname" or k == "formatname" then
+ local name = tex and tex[k]
+ if name or name== "" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k == "outputfilename" then
+ local name = environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+
+setmetatable(environment,mt)
+
+function environment.initialize_arguments(arg)
+ local arguments, files = { }, { }
+ environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
+ for index=1,#arg do
+ local argument = arg[index]
+ if index > 0 then
+ local flag, value = match(argument,"^%-+(.-)=(.-)$")
+ if flag then
+ arguments[flag] = unquote(value or "")
+ else
+ flag = match(argument,"^%-+(.+)")
+ if flag then
+ arguments[flag] = true
+ else
+ files[#files+1] = argument
+ end
+ end
+ end
+ end
+ environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
+end
+
+function environment.setargument(name,value)
+ environment.arguments[name] = value
+end
+
+-- todo: defaults, better checks e.g on type (boolean versus string)
+--
+-- tricky: we get too many hits when we support partials, unless we add
+-- a registration of arguments; so from now on we have 'partial'
+
+function environment.argument(name,partial)
+ local arguments, sortedflags = environment.arguments, environment.sortedflags
+ if arguments[name] then
+ return arguments[name]
+ elseif partial then
+ if not sortedflags then
+ sortedflags = table.sortedkeys(arguments)
+ for k=1,#sortedflags do
+ sortedflags[k] = "^" .. sortedflags[k]
+ end
+ environment.sortedflags = sortedflags
+ end
+ -- example of potential clash: ^mode ^modefile
+ for k=1,#sortedflags do
+ local v = sortedflags[k]
+ if find(name,v) then
+ return arguments[sub(v,2,#v)]
+ end
+ end
+ end
+ return nil
+end
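+-- Sketch of the partial lookup (hypothetical flag): with "--int=scite" on the
+-- command line, environment.argument("interface",true) still resolves to
+-- "scite" because the stored flag "int" is an anchored prefix of the requested
+-- name; without the second argument only exact names match.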
+
+function environment.split_arguments(separator) -- rather special, cut-off before separator
+ local done, before, after = false, { }, { }
+ local original_arguments = environment.original_arguments
+ for k=1,#original_arguments do
+ local v = original_arguments[k]
+ if not done and v == separator then
+ done = true
+ elseif done then
+ after[#after+1] = v
+ else
+ before[#before+1] = v
+ end
+ end
+ return before, after
+end
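+-- Example (sketch): with original arguments { "--run", "--", "first.tex" },
+-- environment.split_arguments("--") returns { "--run" } and { "first.tex" };
+-- the separator itself is dropped.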
+
+function environment.reconstruct_commandline(arg,noquote)
+ arg = arg or environment.original_arguments
+ if noquote and #arg == 1 then
+ local a = arg[1]
+ a = resolvers.resolve(a)
+ a = unquote(a)
+ return a
+ elseif #arg > 0 then
+ local result = { }
+ for i=1,#arg do
+ local a = arg[i]
+ a = resolvers.resolve(a)
+ a = unquote(a)
+ a = gsub(a,'"','\\"') -- tricky
+ if find(a," ") then
+ result[#result+1] = quote(a)
+ else
+ result[#result+1] = a
+ end
+ end
+ return table.join(result," ")
+ else
+ return ""
+ end
+end
+
+if arg then
+
+ -- new: reconstruct quoted snippets (maybe better to just remove the quotes here and add them back later)
+ local newarg, instring = { }, false
+
+ for index=1,#arg do
+ local argument = arg[index]
+ if find(argument,"^\"") then
+ newarg[#newarg+1] = gsub(argument,"^\"","")
+ if not find(argument,"\"$") then
+ instring = true
+ end
+ elseif find(argument,"\"$") then
+ newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
+ instring = false
+ elseif instring then
+ newarg[#newarg] = newarg[#newarg] .. " " .. argument
+ else
+ newarg[#newarg+1] = argument
+ end
+ end
+ for i=1,-5,-1 do
+ newarg[i] = arg[i]
+ end
+
+ environment.initialize_arguments(newarg)
+ environment.original_arguments = newarg
+ environment.raw_arguments = arg
+
+ arg = { } -- prevent duplicate handling
+
+end
+
+-- weird place ... depends on a not yet loaded module
+
+function environment.texfile(filename)
+ return resolvers.find_file(filename,'tex')
+end
+
+function environment.luafile(filename)
+ local resolved = resolvers.find_file(filename,'tex') or ""
+ if resolved ~= "" then
+ return resolved
+ end
+ resolved = resolvers.find_file(filename,'texmfscripts') or ""
+ if resolved ~= "" then
+ return resolved
+ end
+ return resolvers.find_file(filename,'luatexlibs') or ""
+end
+
+environment.loadedluacode = loadfile -- can be overloaded
+
+function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
+ filename = file.replacesuffix(filename, "lua")
+ local fullname = environment.luafile(filename)
+ if fullname and fullname ~= "" then
+ local data = environment.loadedluacode(fullname)
+ if trace_locating then
+ report_resolvers("loading file %s%s", fullname, not data and " failed" or "")
+ elseif not silent then
+ texio.write("<",data and "+ " or "- ",fullname,">")
+ end
+ return data
+ else
+ if trace_locating then
+ report_resolvers("unknown file %s", filename)
+ end
+ return nil
+ end
+end
+
+-- the next ones can use the previous ones / combine
+
+function environment.loadluafile(filename, version)
+ local lucname, luaname, chunk
+ local basename = file.removesuffix(filename)
+ if basename == filename then
+ lucname, luaname = basename .. ".luc", basename .. ".lua"
+ else
+ lucname, luaname = nil, basename -- forced suffix
+ end
+ -- when not overloaded by explicit suffix we look for a luc file first
+ local fullname = (lucname and environment.luafile(lucname)) or ""
+ if fullname ~= "" then
+ if trace_locating then
+ report_resolvers("loading %s", fullname)
+ end
+ chunk = loadfile(fullname) -- this way we don't need a file exists check
+ end
+ if chunk then
+ assert(chunk)()
+ if version then
+ -- we check if the version number of this chunk matches
+ local v = version -- can be nil
+ if modules and modules[filename] then
+ v = modules[filename].version -- new method
+ elseif versions and versions[filename] then
+ v = versions[filename] -- old method
+ end
+ if v == version then
+ return true
+ else
+ if trace_locating then
+ report_resolvers("version mismatch for %s: lua=%s, luc=%s", filename, v, version)
+ end
+ environment.loadluafile(filename)
+ end
+ else
+ return true
+ end
+ end
+ fullname = (luaname and environment.luafile(luaname)) or ""
+ if fullname ~= "" then
+ if trace_locating then
+ report_resolvers("loading %s", fullname)
+ end
+ chunk = loadfile(fullname) -- this way we don't need a file exists check
+ if not chunk then
+ if trace_locating then
+ report_resolvers("unknown file %s", filename)
+ end
+ else
+ assert(chunk)()
+ return true
+ end
+ end
+ return false
+end
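+-- Sketch (hypothetical module name): environment.loadluafile("some-file",1.001)
+-- prefers some-file.luc, runs it and compares the version recorded in
+-- modules["some-file"].version with the requested one; only when the bytecode
+-- chunk is missing or its version does not match is the plain some-file.lua
+-- loaded as well.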
+
end -- of closure
@@ -3906,6 +5048,8 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+local report_xml = logs.new("xml")
+
--[[ldx--
<p>The parser used here is inspired by the variant discussed in the lua book, but
handles comment and processing instructions, has a different structure, provides
@@ -3920,7 +5064,6 @@ optimize the code.</p>
xml = xml or { }
---~ local xml = xml
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
@@ -4044,7 +5187,7 @@ local dcache, hcache, acache = { }, { }, { }
local mt = { }
-function initialize_mt(root)
+local function initialize_mt(root)
mt = { __index = root } -- will be redefined later
end
@@ -4148,7 +5291,7 @@ local reported_attribute_errors = { }
local function attribute_value_error(str)
if not reported_attribute_errors[str] then
- logs.report("xml","invalid attribute value: %q",str)
+ report_xml("invalid attribute value: %q",str)
reported_attribute_errors[str] = true
at._error_ = str
end
@@ -4156,7 +5299,7 @@ local function attribute_value_error(str)
end
local function attribute_specification_error(str)
if not reported_attribute_errors[str] then
- logs.report("xml","invalid attribute specification: %q",str)
+ report_xml("invalid attribute specification: %q",str)
reported_attribute_errors[str] = true
at._error_ = str
end
@@ -4219,18 +5362,18 @@ local function handle_hex_entity(str)
h = unify_predefined and predefined_unified[n]
if h then
if trace_entities then
- logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
end
elseif utfize then
h = (n and utfchar(n)) or xml.unknown_hex_entity_format(str) or ""
if not n then
- logs.report("xml","utfize, ignoring hex entity &#x%s;",str)
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
end
else
if trace_entities then
- logs.report("xml","found entity &#x%s;",str)
+ report_xml("found entity &#x%s;",str)
end
h = "&#x" .. str .. ";"
end
@@ -4246,18 +5389,18 @@ local function handle_dec_entity(str)
d = unify_predefined and predefined_unified[n]
if d then
if trace_entities then
- logs.report("xml","utfize, converting dec entity &#%s; into %s",str,d)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
elseif utfize then
d = (n and utfchar(n)) or xml.unknown_dec_entity_format(str) or ""
if not n then
- logs.report("xml","utfize, ignoring dec entity &#%s;",str)
+ report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,h)
end
else
if trace_entities then
- logs.report("xml","found entity &#%s;",str)
+ report_xml("found entity &#%s;",str)
end
d = "&#" .. str .. ";"
end
@@ -4282,7 +5425,7 @@ local function handle_any_entity(str)
end
if a then
if trace_entities then
- logs.report("xml","resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
a = lpegmatch(parsedentity,a) or a
else
@@ -4291,11 +5434,11 @@ local function handle_any_entity(str)
end
if a then
if trace_entities then
- logs.report("xml","resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
end
else
if trace_entities then
- logs.report("xml","keeping entity &%s;",str)
+ report_xml("keeping entity &%s;",str)
end
if str == "" then
a = "&error;"
@@ -4307,7 +5450,7 @@ local function handle_any_entity(str)
acache[str] = a
elseif trace_entities then
if not acache[str] then
- logs.report("xml","converting entity &%s; into %s",str,a)
+ report_xml("converting entity &%s; into %s",str,a)
acache[str] = a
end
end
@@ -4316,7 +5459,7 @@ local function handle_any_entity(str)
local a = acache[str]
if not a then
if trace_entities then
- logs.report("xml","found entity &%s;",str)
+ report_xml("found entity &%s;",str)
end
a = resolve_predefined and predefined_simplified[str]
if a then
@@ -4335,7 +5478,7 @@ local function handle_any_entity(str)
end
local function handle_end_entity(chr)
- logs.report("xml","error in entity, %q found instead of ';'",chr)
+ report_xml("error in entity, %q found instead of ';'",chr)
end
local space = S(' \r\n\t')
@@ -4470,7 +5613,7 @@ local function xmlconvert(data, settings)
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, result, entities = { }, { }, { }, { }, nil, nil, settings.entities or { }
+ stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -4498,6 +5641,7 @@ local function xmlconvert(data, settings)
else
errorstr = "invalid xml file - no text at all"
end
+ local result
if errorstr and errorstr ~= "" then
result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
setmetatable(stack, mt)
@@ -4678,7 +5822,7 @@ local function verbose_element(e,handlers)
ats[#ats+1] = format('%s=%q',k,v)
end
end
- if ern and trace_remap and ern ~= ens then
+ if ern and trace_entities and ern ~= ens then
ens = ern
end
if ens ~= "" then
@@ -4809,7 +5953,7 @@ local function newhandlers(settings)
if settings then
for k,v in next, settings do
if type(v) == "table" then
- tk = t[k] if not tk then tk = { } t[k] = tk end
+ local tk = t[k] if not tk then tk = { } t[k] = tk end
for kk,vv in next, v do
tk[kk] = vv
end
@@ -4920,7 +6064,7 @@ local function xmltext(root) -- inline
return (root and xmltostring(root)) or ""
end
-function initialize_mt(root)
+initialize_mt = function(root) -- redefinition
mt = { __tostring = xmltext, __index = root }
end
@@ -4955,7 +6099,6 @@ xml.string = xmlstring
<p>A few helpers:</p>
--ldx]]--
---~ xmlsetproperty(root,"settings",settings)
function xml.settings(e)
while e do
@@ -5117,6 +6260,8 @@ local trace_lpath = false if trackers then trackers.register("xml.path",
local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end
+local report_lpath = logs.new("lpath")
+
--[[ldx--
<p>We've now arrived at an interesting part: accessing the tree using a subset
of <l n='xpath'/> and since we're not compatible we call it <l n='lpath'/>. We
@@ -5143,7 +6288,7 @@ local function fallback (t, name)
if fn then
t[name] = fn
else
- logs.report("xml","unknown sub finalizer '%s'",tostring(name))
+ report_lpath("unknown sub finalizer '%s'",tostring(name))
fn = function() end
end
return fn
@@ -5204,11 +6349,6 @@ apply_axis['root'] = function(list)
end
apply_axis['self'] = function(list)
---~ local collected = { }
---~ for l=1,#list do
---~ collected[#collected+1] = list[l]
---~ end
---~ return collected
return list
end
@@ -5335,38 +6475,10 @@ apply_axis['namespace'] = function(list)
end
apply_axis['following'] = function(list) -- incomplete
---~ local collected = { }
---~ for l=1,#list do
---~ local ll = list[l]
---~ local p = ll.__p__
---~ local d = p.dt
---~ for i=ll.ni+1,#d do
---~ local di = d[i]
---~ if type(di) == "table" then
---~ collected[#collected+1] = di
---~ break
---~ end
---~ end
---~ end
---~ return collected
return { }
end
apply_axis['preceding'] = function(list) -- incomplete
---~ local collected = { }
---~ for l=1,#list do
---~ local ll = list[l]
---~ local p = ll.__p__
---~ local d = p.dt
---~ for i=ll.ni-1,1,-1 do
---~ local di = d[i]
---~ if type(di) == "table" then
---~ collected[#collected+1] = di
---~ break
---~ end
---~ end
---~ end
---~ return collected
return { }
end
@@ -5629,14 +6741,12 @@ local converter = Cs (
)
cleaner = Cs ( (
---~ lp_fastpos +
lp_reserved +
lp_number +
lp_string +
1 )^1 )
---~ expr
local template_e = [[
local expr = xml.expressions
@@ -5687,13 +6797,13 @@ local skip = { }
local function errorrunner_e(str,cnv)
if not skip[str] then
- logs.report("lpath","error in expression: %s => %s",str,cnv)
+ report_lpath("error in expression: %s => %s",str,cnv)
skip[str] = cnv or str
end
return false
end
local function errorrunner_f(str,arg)
- logs.report("lpath","error in finalizer: %s(%s)",str,arg or "")
+ report_lpath("error in finalizer: %s(%s)",str,arg or "")
return false
end
@@ -5860,7 +6970,7 @@ local function lshow(parsed)
end
local s = table.serialize_functions -- ugly
table.serialize_functions = false -- ugly
- logs.report("lpath","%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false))
+ report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false))
table.serialize_functions = s -- ugly
end
@@ -5890,7 +7000,7 @@ parse_pattern = function (pattern) -- the gain of caching is rather minimal
local np = #parsed
if np == 0 then
parsed = { pattern = pattern, register_self, state = "parsing error" }
- logs.report("lpath","parsing error in '%s'",pattern)
+ report_lpath("parsing error in '%s'",pattern)
lshow(parsed)
else
-- we could have done this with a more complex parser but this
@@ -5994,32 +7104,32 @@ local function traced_apply(list,parsed,nofparsed,order)
if trace_lparse then
lshow(parsed)
end
- logs.report("lpath", "collecting : %s",parsed.pattern)
- logs.report("lpath", " root tags : %s",tagstostring(list))
- logs.report("lpath", " order : %s",order or "unset")
+ report_lpath("collecting : %s",parsed.pattern)
+ report_lpath(" root tags : %s",tagstostring(list))
+ report_lpath(" order : %s",order or "unset")
local collected = list
for i=1,nofparsed do
local pi = parsed[i]
local kind = pi.kind
if kind == "axis" then
collected = apply_axis[pi.axis](collected)
- logs.report("lpath", "% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
+ report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
elseif kind == "nodes" then
collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- logs.report("lpath", "% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
+ report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
- logs.report("lpath", "% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
+ report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
elseif kind == "finalizer" then
collected = pi.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
return collected
end
if not collected or #collected == 0 then
local pn = i < nofparsed and parsed[nofparsed]
if pn and pn.kind == "finalizer" then
collected = pn.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
return collected
end
return nil
@@ -6132,7 +7242,7 @@ expressions.boolean = toboolean
-- user interface
local function traverse(root,pattern,handle)
- logs.report("xml","use 'xml.selection' instead for '%s'",pattern)
+ report_lpath("use 'xml.selection' instead for '%s'",pattern)
local collected = parse_apply({ root },pattern)
if collected then
for c=1,#collected do
@@ -6180,7 +7290,7 @@ local function dofunction(collected,fnc)
f(collected[c])
end
else
- logs.report("xml","unknown function '%s'",fnc)
+ report_lpath("unknown function '%s'",fnc)
end
end
end
@@ -6372,7 +7482,6 @@ local function xmlgsub(t,old,new) -- will be replaced
end
end
---~ xml.gsub = xmlgsub
function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual
if d and k then
@@ -6384,12 +7493,7 @@ function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual
end
end
---~ xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
---~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end
---~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end
---~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end
---~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>"
local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs
@@ -6455,6 +7559,8 @@ if not modules then modules = { } end modules ['lxml-aux'] = {
local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
+local report_xml = logs.new("xml")
+
local xmlparseapply, xmlconvert, xmlcopy, xmlname = xml.parse_apply, xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
@@ -6463,7 +7569,7 @@ local insert, remove = table.insert, table.remove
local gmatch, gsub = string.gmatch, string.gsub
local function report(what,pattern,c,e)
- logs.report("xml","%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
+ report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
local function withelements(e,handle,depth)
@@ -6616,12 +7722,7 @@ local function xmltoelement(whatever,root)
return whatever -- string
end
if element then
- --~ if element.ri then
- --~ element = element.dt[element.ri].dt
- --~ else
- --~ element = element.dt
- --~ end
- end
+ end
return element
end
@@ -6760,9 +7861,6 @@ local function include(xmldata,pattern,attribute,recursive,loaddata)
-- for the moment hard coded
epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
else
---~ local settings = xmldata.settings
---~ settings.parent_root = xmldata -- to be tested
---~ local xi = xmlconvert(data,settings)
local xi = xmlinheritedconvert(data,xmldata)
if not xi then
epdt[ek.ni] = "" -- xml.empty(d,k)
@@ -6779,28 +7877,7 @@ end
xml.include = include
---~ local function manipulate(xmldata,pattern,manipulator) -- untested and might go away
---~ local collected = xmlparseapply({ xmldata },pattern)
---~ if collected then
---~ local xmltostring = xml.tostring
---~ for c=1,#collected do
---~ local e = collected[c]
---~ local data = manipulator(xmltostring(e))
---~ if data == "" then
---~ epdt[e.ni] = ""
---~ else
---~ local xi = xmlinheritedconvert(data,xmldata)
---~ if not xi then
---~ epdt[e.ni] = ""
---~ else
---~ epdt[e.ni] = xml.body(xi) -- xml.assign(d,k,xi)
---~ end
---~ end
---~ end
---~ end
---~ end
-
---~ xml.manipulate = manipulate
+
function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and trailing space !
local collected = xmlparseapply({ root },pattern)
@@ -6826,8 +7903,7 @@ function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and
end
end
else
- --~ str.ni = i
- t[#t+1] = str
+ t[#t+1] = str
end
end
e.dt = t
@@ -7285,825 +8361,1137 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-env'] = {
+if not modules then modules = { } end modules ['data-ini'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
--- A former version provided functionality for non embeded core
--- scripts i.e. runtime library loading. Given the amount of
--- Lua code we use now, this no longer makes sense. Much of this
--- evolved before bytecode arrays were available and so a lot of
--- code has disappeared already.
+local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local concat = table.concat
+local next, type = next, type
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
-local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquote, quote = string.unquote, string.quote
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
--- precautions
+local report_resolvers = logs.new("resolvers")
-os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+local ostype, osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv
-function os.setlocale()
- -- no way you can mess with it
-end
-
--- dirty tricks
-
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
-end
+-- The code here used to be part of a data-res but for convenience
+-- we now split it over multiple files. As this file is now the
+-- starting point we introduce resolvers here.
-if profiler and os.env["MTX_PROFILE_RUN"] == "YES" then
- profiler.start("luatex-profile.log")
-end
+resolvers = resolvers or { }
--- environment
+-- We don't want the kpse library to kick in. Also, we want to be able to
+-- execute programs. Control over execution is implemented later.
-environment = environment or { }
-environment.arguments = { }
-environment.files = { }
-environment.sortedflags = nil
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
-if not environment.jobname or environment.jobname == "" then if tex then environment.jobname = tex.jobname end end
-if not environment.version or environment.version == "" then environment.version = "unknown" end
-if not environment.jobname then environment.jobname = "unknown" end
+kpse = { original = kpse }
-function environment.initialize_arguments(arg)
- local arguments, files = { }, { }
- environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
- for index=1,#arg do
- local argument = arg[index]
- if index > 0 then
- local flag, value = match(argument,"^%-+(.-)=(.-)$")
- if flag then
- arguments[flag] = unquote(value or "")
- else
- flag = match(argument,"^%-+(.+)")
- if flag then
- arguments[flag] = true
- else
- files[#files+1] = argument
- end
+setmetatable(kpse, {
+ __index = function(kp,name)
+ local r = resolvers[name]
+ if not r then
+ r = function (...)
+ report_resolvers("not supported: %s(%s)",name,concat(...))
end
+ rawset(kp,name,r)
end
+ return r
end
- environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
+} )
+
+-- First we check a couple of environment variables. Some might be
+-- set already but we need them later on. We start with the system
+-- font path.
+
+do
+
+ local osfontdir = osgetenv("OSFONTDIR")
+
+ if osfontdir and osfontdir ~= "" then
+ -- ok
+ elseif osname == "windows" then
+ ossetenv("OSFONTDIR","c:/windows/fonts//")
+ elseif osname == "macosx" then
+ ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
+ end
+
end
-function environment.setargument(name,value)
- environment.arguments[name] = value
+-- Next comes the user's home path. We need this as later on we have
+-- to replace ~ with its value.
+
+do
+
+ local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or '~'
+
+ homedir = file.collapse_path(homedir)
+
+ ossetenv("HOME", homedir) -- can be used in unix cnf files
+ ossetenv("USERPROFILE",homedir) -- can be used in windows cnf files
+
+ environment.homedir = homedir
+
end
--- todo: defaults, better checks e.g on type (boolean versus string)
---
--- tricky: too many hits when we support partials unless we add
--- a registration of arguments so from now on we have 'partial'
+-- The following code sets the name of the own binary and its
+-- path. This is fallback code as we have os.selfdir now.
-function environment.argument(name,partial)
- local arguments, sortedflags = environment.arguments, environment.sortedflags
- if arguments[name] then
- return arguments[name]
- elseif partial then
- if not sortedflags then
- sortedflags = table.sortedkeys(arguments)
- for k=1,#sortedflags do
- sortedflags[k] = "^" .. sortedflags[k]
- end
- environment.sortedflags = sortedflags
+do
+
+ local args = environment.original_arguments or arg -- this needs a cleanup
+
+ local ownbin = environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
+ local ownpath = environment.ownpath or os.selfdir
+
+ ownbin = file.collapse_path(ownbin)
+ ownpath = file.collapse_path(ownpath)
+
+ if not ownpath or ownpath == "" or ownpath == "unset" then
+ ownpath = args[-1] or arg[-1]
+ ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
+ if not ownpath or ownpath == "" then
+ ownpath = args[-0] or arg[-0]
+ ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
end
- -- example of potential clash: ^mode ^modefile
- for k=1,#sortedflags do
- local v = sortedflags[k]
- if find(name,v) then
- return arguments[sub(v,2,#v)]
+ local binary = ownbin
+ if not ownpath or ownpath == "" then
+ ownpath = ownpath and filedirname(binary)
+ end
+ if not ownpath or ownpath == "" then
+ if os.binsuffix ~= "" then
+ binary = file.replacesuffix(binary,os.binsuffix)
+ end
+ local path = osgetenv("PATH")
+ if path then
+ for p in gmatch(path,"[^"..io.pathseparator.."]+") do
+ local b = filejoin(p,binary)
+ if lfs.isfile(b) then
+ -- we assume that after changing to the path the currentdir function
+ -- resolves to the real location and use this side effect here; this
+ -- trick is needed because on the mac, installations use symlinks in the
+ -- path instead of real locations
+ local olddir = lfs.currentdir()
+ if lfs.chdir(p) then
+ local pp = lfs.currentdir()
+ if trace_locating and p ~= pp then
+ report_resolvers("following symlink '%s' to '%s'",p,pp)
+ end
+ ownpath = pp
+ lfs.chdir(olddir)
+ else
+ if trace_locating then
+ report_resolvers("unable to check path '%s'",p)
+ end
+ ownpath = p
+ end
+ break
+ end
+ end
end
end
+ if not ownpath or ownpath == "" then
+ ownpath = "."
+ report_resolvers("forcing fallback ownpath .")
+ elseif trace_locating then
+ report_resolvers("using ownpath '%s'",ownpath)
+ end
end
- return nil
+
+ environment.ownbin = ownbin
+ environment.ownpath = ownpath
+
end
-environment.argument("x",true)
+resolvers.ownpath = environment.ownpath
-function environment.split_arguments(separator) -- rather special, cut-off before separator
- local done, before, after = false, { }, { }
- local original_arguments = environment.original_arguments
- for k=1,#original_arguments do
- local v = original_arguments[k]
- if not done and v == separator then
- done = true
- elseif done then
- after[#after+1] = v
- else
- before[#before+1] = v
- end
- end
- return before, after
+function resolvers.getownpath()
+ return environment.ownpath
end
-function environment.reconstruct_commandline(arg,noquote)
- arg = arg or environment.original_arguments
- if noquote and #arg == 1 then
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquote(a)
- return a
- elseif #arg > 0 then
- local result = { }
- for i=1,#arg do
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquote(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quote(a)
- else
- result[#result+1] = a
- end
- end
- return table.join(result," ")
+-- The self variables permit us to use only a few (or even no)
+-- environment variables.
+
+do
+
+ local ownpath = environment.ownpath or dir.current()
+
+ if ownpath then
+ ossetenv('SELFAUTOLOC', file.collapse_path(ownpath))
+ ossetenv('SELFAUTODIR', file.collapse_path(ownpath .. "/.."))
+ ossetenv('SELFAUTOPARENT', file.collapse_path(ownpath .. "/../.."))
else
- return ""
+ report_resolvers("error: unable to locate ownpath")
+ os.exit()
end
+
end
-if arg then
+-- The running os:
- -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
- local newarg, instring = { }, false
+-- todo: check if context sits here; os.platform is more trustworthy
+-- than the bin check, as mtx-update runs from another path
- for index=1,#arg do
- local argument = arg[index]
- if find(argument,"^\"") then
- newarg[#newarg+1] = gsub(argument,"^\"","")
- if not find(argument,"\"$") then
- instring = true
- end
- elseif find(argument,"\"$") then
- newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
- instring = false
- elseif instring then
- newarg[#newarg] = newarg[#newarg] .. " " .. argument
- else
- newarg[#newarg+1] = argument
- end
- end
- for i=1,-5,-1 do
- newarg[i] = arg[i]
- end
+local texos = environment.texos or osgetenv("TEXOS")
+local texmfos = environment.texmfos or osgetenv('SELFAUTODIR')
- environment.initialize_arguments(newarg)
- environment.original_arguments = newarg
- environment.raw_arguments = arg
+if not texos or texos == "" then
+ texos = file.basename(texmfos)
+end
- arg = { } -- prevent duplicate handling
+ossetenv('TEXMFOS', texmfos) -- full bin path
+ossetenv('TEXOS', texos) -- partial bin parent
+ossetenv('SELFAUTOSYSTEM',os.platform) -- bonus
-end
+environment.texos = texos
+environment.texmfos = texmfos
--- weird place ... depends on a not yet loaded module
+-- The current root:
-function environment.texfile(filename)
- return resolvers.find_file(filename,'tex')
-end
+local texroot = environment.texroot or osgetenv("TEXROOT")
-function environment.luafile(filename)
- local resolved = resolvers.find_file(filename,'tex') or ""
- if resolved ~= "" then
- return resolved
- end
- resolved = resolvers.find_file(filename,'texmfscripts') or ""
- if resolved ~= "" then
- return resolved
- end
- return resolvers.find_file(filename,'luatexlibs') or ""
+if not texroot or texroot == "" then
+ texroot = osgetenv('SELFAUTOPARENT')
+ ossetenv('TEXROOT',texroot)
end
-environment.loadedluacode = loadfile -- can be overloaded
+environment.texroot = file.collapse_path(texroot)
---~ function environment.loadedluacode(name)
---~ if os.spawn("texluac -s -o texluac.luc " .. name) == 0 then
---~ local chunk = loadstring(io.loaddata("texluac.luc"))
---~ os.remove("texluac.luc")
---~ return chunk
---~ else
---~ environment.loadedluacode = loadfile -- can be overloaded
---~ return loadfile(name)
---~ end
---~ end
-
-function environment.luafilechunk(filename) -- used for loading lua bytecode in the format
- filename = file.replacesuffix(filename, "lua")
- local fullname = environment.luafile(filename)
- if fullname and fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading file %s", fullname)
- end
- return environment.loadedluacode(fullname)
- else
- if trace_locating then
- logs.report("fileio","unknown file %s", filename)
- end
- return nil
+-- Tracing. Todo ...
+
+function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
+ if n then
+ trackers.disable("resolvers.*")
+ trackers.enable("resolvers."..n)
end
end
--- the next ones can use the previous ones / combine
+resolvers.settrace(osgetenv("MTX_INPUT_TRACE"))
-function environment.loadluafile(filename, version)
- local lucname, luaname, chunk
- local basename = file.removesuffix(filename)
- if basename == filename then
- lucname, luaname = basename .. ".luc", basename .. ".lua"
- else
- lucname, luaname = nil, basename -- forced suffix
- end
- -- when not overloaded by explicit suffix we look for a luc file first
- local fullname = (lucname and environment.luafile(lucname)) or ""
- if fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- end
- if chunk then
- assert(chunk)()
- if version then
- -- we check of the version number of this chunk matches
- local v = version -- can be nil
- if modules and modules[filename] then
- v = modules[filename].version -- new method
- elseif versions and versions[filename] then
- v = versions[filename] -- old method
- end
- if v == version then
- return true
- else
- if trace_locating then
- logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version)
- end
- environment.loadluafile(filename)
- end
- else
- return true
- end
- end
- fullname = (luaname and environment.luafile(luaname)) or ""
- if fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- if not chunk then
- if trace_locating then
- logs.report("fileio","unknown file %s", filename)
- end
- else
- assert(chunk)()
- return true
- end
- end
- return false
-end
+-- todo:
+
+-- if profiler and osgetenv("MTX_PROFILE_RUN") == "YES" then
+-- profiler.start("luatex-profile.log")
+-- end
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-inf'] = {
+if not modules then modules = { } end modules ['data-exp'] = {
version = 1.001,
- comment = "companion to trac-inf.mkiv",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
-local format = string.format
+local format, gsub, find, gmatch, lower = string.format, string.gsub, string.find, string.gmatch, string.lower
+local concat, sort = table.concat, table.sort
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local lpegCt, lpegCs, lpegP, lpegC, lpegS = lpeg.Ct, lpeg.Cs, lpeg.P, lpeg.C, lpeg.S
+local type, next = type, next
-local statusinfo, n, registered = { }, 0, { }
+local ostype = os.type
+local collapse_path = file.collapse_path
-statistics = statistics or { }
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
-statistics.enable = true
-statistics.threshold = 0.05
+local report_resolvers = logs.new("resolvers")
--- timing functions
+-- As this bit of code is somewhat special it gets its own module. After
+-- all, when working on the main resolver code, I don't want to scroll
+-- past this every time.
-local clock = os.gettimeofday or os.clock
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
+-- this one is better and faster, but it took me a while to realize
+-- that this kind of replacement is cleaner than messy parsing and
+-- fuzzy concatenating. We can probably gain a bit by selectively
+-- applying lpeg, but experiments with lpeg parsing this proved not to
+-- work that well; the parsing is ok, but dealing with the resulting
+-- table is a pain because we need to work inside-out recursively
-local notimer
+local dummy_path_expr = "^!*unset/*$"
-function statistics.hastimer(instance)
- return instance and instance.starttime
+local function do_first(a,b)
+ local t = { }
+ for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
+ return "{" .. concat(t,",") .. "}"
end
-function statistics.resettiming(instance)
- if not instance then
- notimer = { timing = 0, loadtime = 0 }
- else
- instance.timing, instance.loadtime = 0, 0
+local function do_second(a,b)
+ local t = { }
+ for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
+ return "{" .. concat(t,",") .. "}"
+end
+
+local function do_both(a,b)
+ local t = { }
+ for sa in gmatch(a,"[^,]+") do
+ for sb in gmatch(b,"[^,]+") do
+ t[#t+1] = sa .. sb
+ end
end
+ return "{" .. concat(t,",") .. "}"
+end
+
+local function do_three(a,b,c)
+ return a .. b.. c
end
-function statistics.starttiming(instance)
- if not instance then
- notimer = { }
- instance = notimer
+local stripper_1 = lpeg.stripper("{}@")
+
+local replacer_1 = lpeg.replacer {
+ { ",}", ",@}" },
+ { "{,", "{@," },
+}
+
+local function splitpathexpr(str, newlist, validate)
+ -- no need for further optimization as it is only called a
+ -- few times, we can use lpeg for the sub
+ if trace_expansions then
+ report_resolvers("expanding variable '%s'",str)
end
- local it = instance.timing
- if not it then
- it = 0
+ local t, ok, done = newlist or { }, false, false
+ str = lpegmatch(replacer_1,str)
+ while true do
+ done = false
+ while true do
+ str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
+ if ok > 0 then done = true else break end
+ end
+ while true do
+ str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
+ if ok > 0 then done = true else break end
+ end
+ while true do
+ str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
+ if ok > 0 then done = true else break end
+ end
+ str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
+ if ok > 0 then done = true end
+ if not done then break end
end
- if it == 0 then
- instance.starttime = clock()
- if not instance.loadtime then
- instance.loadtime = 0
+ str = lpegmatch(stripper_1,str)
+ if validate then
+ for s in gmatch(str,"[^,]+") do
+ s = validate(s)
+ if s then t[#t+1] = s end
end
else
---~ logs.report("system","nested timing (%s)",tostring(instance))
- end
- instance.timing = it + 1
-end
-
-function statistics.stoptiming(instance, report)
- if not instance then
- instance = notimer
+ for s in gmatch(str,"[^,]+") do
+ t[#t+1] = s
+ end
end
- if instance then
- local it = instance.timing
- if it > 1 then
- instance.timing = it - 1
- else
- local starttime = instance.starttime
- if starttime then
- local stoptime = clock()
- local loadtime = stoptime - starttime
- instance.stoptime = stoptime
- instance.loadtime = instance.loadtime + loadtime
- if report then
- statistics.report("load time %0.3f",loadtime)
- end
- instance.timing = 0
- return loadtime
- end
+ if trace_expansions then
+ for k=1,#t do
+ report_resolvers("% 4i: %s",k,t[k])
end
end
- return 0
+ return t
end
-function statistics.elapsedtime(instance)
- if not instance then
- instance = notimer
+local function validate(s)
+ local isrecursive = find(s,"//$")
+ s = collapse_path(s)
+ if isrecursive then
+ s = s .. "//"
end
- return format("%0.3f",(instance and instance.loadtime) or 0)
+ return s ~= "" and not find(s,dummy_path_expr) and s
end
-function statistics.elapsedindeed(instance)
- if not instance then
- instance = notimer
+resolvers.validated_path = validate -- keeps the trailing //
+
+function resolvers.expanded_path_from_list(pathlist) -- maybe not a list, just a path
+ -- a previous version fed back into pathlist
+ local newlist, ok = { }, false
+ for k=1,#pathlist do
+ if find(pathlist[k],"[{}]") then
+ ok = true
+ break
+ end
end
- local t = (instance and instance.loadtime) or 0
- return t > statistics.threshold
+ if ok then
+ for k=1,#pathlist do
+ splitpathexpr(pathlist[k],newlist,validate)
+ end
+ else
+ for k=1,#pathlist do
+ for p in gmatch(pathlist[k],"([^,]+)") do
+ p = validate(p)
+ if p ~= "" then newlist[#newlist+1] = p end
+ end
+ end
+ end
+ return newlist
end
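+-- Sketch (hypothetical paths): resolvers.expanded_path_from_list applied to
+-- { "/opt/tex/{texmf-local,texmf}//" } expands the braces into the two
+-- recursive entries /opt/tex/texmf-local// and /opt/tex/texmf//; the trailing
+-- // that marks recursive scanning is preserved by the validator.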
-function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds
- if statistics.elapsedindeed(instance) then
- return format("%s seconds %s", statistics.elapsedtime(instance),rest or "")
- end
+-- We also put some cleanup code here.
+
+local cleanup -- used recursively
+
+cleanup = lpeg.replacer {
+ { "!", "" },
+ { "\\", "/" },
+ { "~" , function() return lpegmatch(cleanup,environment.homedir) end },
+}
+
+function resolvers.clean_path(str)
+ return str and lpegmatch(cleanup,str)
end
--- general function
+-- This one strips quotes and funny tokens.
-function statistics.register(tag,fnc)
- if statistics.enable and type(fnc) == "function" then
- local rt = registered[tag] or (#statusinfo + 1)
- statusinfo[rt] = { tag, fnc }
- registered[tag] = rt
- if #tag > n then n = #tag end
- end
+
+local expandhome = lpegP("~") / "$HOME" -- environment.homedir
+
+local dodouble = lpegP('"')/"" * (expandhome + (1 - lpegP('"')))^0 * lpegP('"')/""
+local dosingle = lpegP("'")/"" * (expandhome + (1 - lpegP("'")))^0 * lpegP("'")/""
+local dostring = (expandhome + 1 )^0
+
+local stripper = lpegCs(
+ lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
+)
+
+function resolvers.checked_variable(str) -- assumes str is a string
+ return lpegmatch(stripper,str) or str
end
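+-- Sketch: resolvers.checked_variable([["~/texmf"]]) strips the surrounding
+-- quotes and expands the leading ~ to $HOME, giving "$HOME/texmf" (hypothetical
+-- value, not a real configuration entry).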
-function statistics.show(reporter)
- if statistics.enable then
- if not reporter then reporter = function(tag,data,n) texio.write_nl(tag .. " " .. data) end end
- -- this code will move
- local register = statistics.register
- register("luatex banner", function()
- return string.lower(status.banner)
- end)
- register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
- end)
- register("callbacks", function()
- local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
- return format("direct: %s, indirect: %s, total: %s", total-indirect, indirect, total)
- end)
- register("current memory usage", statistics.memused)
- register("runtime",statistics.runtime)
--- --
- for i=1,#statusinfo do
- local s = statusinfo[i]
- local r = s[2]()
- if r then
- reporter(s[1],r,n)
+-- The path splitter:
+
+-- A config (optionally) has the paths split in tables. Internally
+-- we join them and split them after the expansion has taken place. This
+-- is more convenient.
+
+
+local cache = { }
+
+local splitter = lpegCt(lpeg.splitat(lpegS(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+
+local function split_configuration_path(str) -- beware, this can be either a path or a { specification }
+ if str then
+ local found = cache[str]
+ if not found then
+ if str == "" then
+ found = { }
+ else
+ str = gsub(str,"\\","/")
+ local split = lpegmatch(splitter,str)
+ found = { }
+ for i=1,#split do
+ local s = split[i]
+ if not find(s,"^{*unset}*") then
+ found[#found+1] = s
+ end
+ end
+ if trace_expansions then
+ report_resolvers("splitting path specification '%s'",str)
+ for k=1,#found do
+ report_resolvers("% 4i: %s",k,found[k])
+ end
+ end
+ cache[str] = found
end
end
- texio.write_nl("") -- final newline
- statistics.enable = false
+ return found
end
end
-function statistics.show_job_stat(tag,data,n)
- texio.write_nl(format("%-15s: %s - %s","mkiv lua stats",tag:rpadd(n," "),data))
-end
-
-function statistics.memused() -- no math.round yet -)
- local round = math.round or math.floor
- return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
-end
+resolvers.split_configuration_path = split_configuration_path
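+
+-- A usage sketch (made-up specification): on unix-like systems the splitter
+-- cuts on ':' and ';', on windows only on ';', and '{unset}' entries are
+-- filtered out, so
+--
+--~ resolvers.split_configuration_path(".:$TEXMF/tex//:{unset}")
+--
+-- is expected to give { ".", "$TEXMF/tex//" } (and the result is cached).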
-if statistics.runtime then
- -- already loaded and set
-elseif luatex and luatex.starttime then
- statistics.starttime = luatex.starttime
- statistics.loadtime = 0
- statistics.timing = 0
-else
- statistics.starttiming(statistics)
+function resolvers.split_path(str)
+ if type(str) == 'table' then
+ return str
+ else
+ return split_configuration_path(str)
+ end
end
-function statistics.runtime()
- statistics.stoptiming(statistics)
- return statistics.formatruntime(statistics.elapsedtime(statistics))
+function resolvers.join_path(str)
+ if type(str) == 'table' then
+ return file.join_path(str)
+ else
+ return str
+ end
end
-function statistics.formatruntime(runtime)
- return format("%s seconds", statistics.elapsedtime(statistics))
-end
+-- The next function scans directories and returns a hash where the
+-- entries are either strings or tables.
-function statistics.timed(action,report)
- local timer = { }
- report = report or logs.simple
- statistics.starttiming(timer)
- action()
- statistics.stoptiming(timer)
- report("total runtime: %s",statistics.elapsedtime(timer))
-end
+-- skip names starting with . or .. etc. or containing funny characters
--- where, not really the best spot for this:
-commands = commands or { }
-local timer
-function commands.resettimer()
- statistics.resettiming(timer)
- statistics.starttiming(timer)
-end
+local weird = lpegP(".")^1 + lpeg.anywhere(lpegS("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-function commands.elapsedtime()
- statistics.stoptiming(timer)
- tex.sprint(statistics.elapsedtime(timer))
+function resolvers.scan_files(specification)
+ if trace_locating then
+ report_resolvers("scanning path '%s'",specification)
+ end
+ local attributes, directory = lfs.attributes, lfs.dir
+ local files = { __path__ = specification }
+ local n, m, r = 0, 0, 0
+ local function scan(spec,path)
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if type(f) == 'string' then
+ files[name] = { f, path }
+ else
+ f[#f+1] = path
+ end
+ else -- probably unique anyway
+ files[name] = path
+ local lower = lower(name)
+ if name ~= lower then
+ files["remap:"..lower] = name
+ r = r + 1
+ end
+ end
+ elseif mode == 'directory' then
+ m = m + 1
+ if path ~= "" then
+ dirs[#dirs+1] = path..'/'..name
+ else
+ dirs[#dirs+1] = name
+ end
+ end
+ end
+ end
+ if #dirs > 0 then
+ sort(dirs)
+ for i=1,#dirs do
+ scan(spec,dirs[i])
+ end
+ end
+ end
+ scan(specification .. '/',"")
+ files.__files__, files.__directories__, files.__remappings__ = n, m, r
+ if trace_locating then
+ report_resolvers("%s files found on %s directories with %s uppercase remappings",n,m,r)
+ end
+ return files
end
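+
+-- A hedged usage sketch (the path is a made-up example): the returned hash
+-- maps file names to the relative subpath(s) where they were seen, keeps
+-- lowercase remappings under "remap:" keys, and records a few counters:
+--
+--~ local files = resolvers.scan_files("/opt/texmf-extra")
+--~ -- files["somefile.tex"]     -> "tex/generic/foo" (or a table of paths)
+--~ -- files["remap:readme.txt"] -> "README.txt"
+--~ -- files.__files__, files.__directories__, files.__remappings__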
-commands.resettimer()
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-log'] = {
+if not modules then modules = { } end modules ['data-env'] = {
version = 1.001,
- comment = "companion to trac-log.mkiv",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
--- this is old code that needs an overhaul
+local formats = { } resolvers.formats = formats
+local suffixes = { } resolvers.suffixes = suffixes
+local dangerous = { } resolvers.dangerous = dangerous
+local suffixmap = { } resolvers.suffixmap = suffixmap
+local alternatives = { } resolvers.alternatives = alternatives
+
+formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' }
+formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' }
+formats['fmt'] = 'TEXFORMATS' suffixes['fmt'] = { 'fmt' }
+formats['map'] = 'TEXFONTMAPS' suffixes['map'] = { 'map' }
+formats['mp'] = 'MPINPUTS' suffixes['mp'] = { 'mp' }
+formats['ocp'] = 'OCPINPUTS' suffixes['ocp'] = { 'ocp' }
+formats['ofm'] = 'OFMFONTS' suffixes['ofm'] = { 'ofm', 'tfm' }
+formats['otf'] = 'OPENTYPEFONTS' suffixes['otf'] = { 'otf' }
+formats['opl'] = 'OPLFONTS' suffixes['opl'] = { 'opl' }
+formats['otp'] = 'OTPINPUTS' suffixes['otp'] = { 'otp' }
+formats['ovf'] = 'OVFFONTS' suffixes['ovf'] = { 'ovf', 'vf' }
+formats['ovp'] = 'OVPFONTS' suffixes['ovp'] = { 'ovp' }
+formats['tex'] = 'TEXINPUTS' suffixes['tex'] = { 'tex' }
+formats['tfm'] = 'TFMFONTS' suffixes['tfm'] = { 'tfm' }
+formats['ttf'] = 'TTFONTS' suffixes['ttf'] = { 'ttf', 'ttc', 'dfont' }
+formats['pfb'] = 'T1FONTS' suffixes['pfb'] = { 'pfb', 'pfa' }
+formats['vf'] = 'VFFONTS' suffixes['vf'] = { 'vf' }
+formats['fea'] = 'FONTFEATURES' suffixes['fea'] = { 'fea' }
+formats['cid'] = 'FONTCIDMAPS' suffixes['cid'] = { 'cid', 'cidmap' }
+formats['texmfscripts'] = 'TEXMFSCRIPTS' suffixes['texmfscripts'] = { 'rb', 'pl', 'py' }
+formats['lua'] = 'LUAINPUTS' suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' }
+formats['lib'] = 'CLUAINPUTS' suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' }
---~ io.stdout:setvbuf("no")
---~ io.stderr:setvbuf("no")
-
-local write_nl, write = texio.write_nl or print, texio.write or io.write
-local format, gmatch = string.format, string.gmatch
-local texcount = tex and tex.count
+-- backward compatible ones
-if texlua then
- write_nl = print
- write = io.write
-end
+alternatives['map files'] = 'map'
+alternatives['enc files'] = 'enc'
+alternatives['cid maps'] = 'cid' -- great, why no cid files
+alternatives['font feature files'] = 'fea' -- and fea files here
+alternatives['opentype fonts'] = 'otf'
+alternatives['truetype fonts'] = 'ttf'
+alternatives['truetype collections'] = 'ttc'
+alternatives['truetype dictionary'] = 'dfont'
+alternatives['type1 fonts'] = 'pfb'
--[[ldx--
-<p>This is a prelude to a more extensive logging module. For the sake
-of parsing log files, in addition to the standard logging we will
-provide an <l n='xml'/> structured file. Actually, any logging that
-is hooked into callbacks will be \XML\ by default.</p>
+<p>If you wondered about some of the previous mappings, how about
+the next bunch:</p>
--ldx]]--
-logs = logs or { }
-logs.xml = logs.xml or { }
-logs.tex = logs.tex or { }
+-- kpse specific ones (a few omitted) .. we only add them for locating
+-- files that we don't use anyway
+
+formats['base'] = 'MFBASES' suffixes['base'] = { 'base', 'bas' }
+formats['bib'] = '' suffixes['bib'] = { 'bib' }
+formats['bitmap font'] = '' suffixes['bitmap font'] = { }
+formats['bst'] = '' suffixes['bst'] = { 'bst' }
+formats['cmap files'] = 'CMAPFONTS' suffixes['cmap files'] = { 'cmap' }
+formats['cnf'] = '' suffixes['cnf'] = { 'cnf' }
+formats['cweb'] = '' suffixes['cweb'] = { 'w', 'web', 'ch' }
+formats['dvips config'] = '' suffixes['dvips config'] = { }
+formats['gf'] = '' suffixes['gf'] = { '<resolution>gf' }
+formats['graphic/figure'] = '' suffixes['graphic/figure'] = { 'eps', 'epsi' }
+formats['ist'] = '' suffixes['ist'] = { 'ist' }
+formats['lig files'] = 'LIGFONTS' suffixes['lig files'] = { 'lig' }
+formats['ls-R'] = '' suffixes['ls-R'] = { }
+formats['mem'] = 'MPMEMS' suffixes['mem'] = { 'mem' }
+formats['MetaPost support'] = '' suffixes['MetaPost support'] = { }
+formats['mf'] = 'MFINPUTS' suffixes['mf'] = { 'mf' }
+formats['mft'] = '' suffixes['mft'] = { 'mft' }
+formats['misc fonts'] = '' suffixes['misc fonts'] = { }
+formats['other text files'] = '' suffixes['other text files'] = { }
+formats['other binary files'] = '' suffixes['other binary files'] = { }
+formats['pdftex config'] = 'PDFTEXCONFIG' suffixes['pdftex config'] = { }
+formats['pk'] = '' suffixes['pk'] = { '<resolution>pk' }
+formats['PostScript header'] = 'TEXPSHEADERS' suffixes['PostScript header'] = { 'pro' }
+formats['sfd'] = 'SFDFONTS' suffixes['sfd'] = { 'sfd' }
+formats['TeX system documentation'] = '' suffixes['TeX system documentation'] = { }
+formats['TeX system sources'] = '' suffixes['TeX system sources'] = { }
+formats['Troff fonts'] = '' suffixes['Troff fonts'] = { }
+formats['type42 fonts'] = 'T42FONTS' suffixes['type42 fonts'] = { }
+formats['web'] = '' suffixes['web'] = { 'web', 'ch' }
+formats['web2c files'] = 'WEB2C' suffixes['web2c files'] = { }
+formats['fontconfig files'] = 'FONTCONFIG_PATH' suffixes['fontconfig files'] = { } -- not unique
---[[ldx--
-<p>This looks pretty ugly but we need to speed things up a bit.</p>
---ldx]]--
+alternatives['subfont definition files'] = 'sfd'
-logs.moreinfo = [[
-more information about ConTeXt and the tools that come with it can be found at:
+-- A few accessors, mostly for command line tools.
-maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
-wiki : http://contextgarden.net
-]]
+function resolvers.suffix_of_format(str)
+ local s = suffixes[str]
+ return s and s[1] or ""
+end
-logs.levels = {
- ['error'] = 1,
- ['warning'] = 2,
- ['info'] = 3,
- ['debug'] = 4,
-}
+function resolvers.suffixes_of_format(str)
+ return suffixes[str] or { }
+end
-logs.functions = {
- 'report', 'start', 'stop', 'push', 'pop', 'line', 'direct',
- 'start_run', 'stop_run',
- 'start_page_number', 'stop_page_number',
- 'report_output_pages', 'report_output_log',
- 'report_tex_stat', 'report_job_stat',
- 'show_open', 'show_close', 'show_load',
-}
+-- As we don't register additional suffixes anyway, we can as well
+-- freeze the reverse map here.
-logs.tracers = {
-}
+for name, suffixlist in next, suffixes do
+ for i=1,#suffixlist do
+ suffixmap[suffixlist[i]] = name
+ end
+end
-logs.level = 0
-logs.mode = string.lower((os.getenv("MTX.LOG.MODE") or os.getenv("MTX_LOG_MODE") or "tex"))
+setmetatable(suffixes, { __newindex = function(suffixes,name,suffixlist)
+ rawset(suffixes,name,suffixlist)
+ suffixes[name] = suffixlist
+ for i=1,#suffixlist do
+ suffixmap[suffixlist[i]] = name
+ end
+end } )
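+
+-- Illustration (hypothetical format name): if a new suffix list ever were
+-- assigned, the metatable above would keep the reverse map in sync, e.g.
+--
+--~ suffixes['myformat'] = { 'foo', 'bar' }
+--~ -- afterwards suffixmap['foo'] == 'myformat' and suffixmap['bar'] == 'myformat'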
-function logs.set_level(level)
- logs.level = logs.levels[level] or level
+for name, format in next, formats do
+ dangerous[name] = true
end
-function logs.set_method(method)
- for _, v in next, logs.functions do
- logs[v] = logs[method][v] or function() end
- end
+-- because vf searching is somewhat dangerous, we want to prevent
+-- too liberal searching, especially because we do a lookup on the current
+-- path anyway; only tex (or any) is safe
+
+dangerous.tex = nil
+
+
+-- more helpers
+
+function resolvers.format_of_var(str)
+ return formats[str] or formats[alternatives[str]] or ''
end
--- tex logging
+function resolvers.format_of_suffix(str) -- of file
+ return suffixmap[file.extname(str)] or 'tex'
+end
-function logs.tex.report(category,fmt,...) -- new
- if fmt then
- write_nl(category .. " | " .. format(fmt,...))
- else
- write_nl(category .. " |")
- end
+function resolvers.variable_of_format(str)
+ return formats[str] or formats[alternatives[str]] or ''
end
-function logs.tex.line(fmt,...) -- new
- if fmt then
- write_nl(format(fmt,...))
- else
- write_nl("")
+function resolvers.var_of_format_or_suffix(str)
+ local v = formats[str]
+ if v then
+ return v
+ end
+ v = formats[alternatives[str]]
+ if v then
+ return v
end
+ v = suffixmap[fileextname(str)]
+ if v then
+ return formats[v]
+ end
+ return ''
end
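+
+-- A few hedged examples of the helpers above, following the tables defined
+-- earlier in this module:
+--
+--~ resolvers.format_of_var("opentype fonts")   -- 'OPENTYPEFONTS' (via the alternatives table)
+--~ resolvers.format_of_suffix("somefont.otf")  -- 'otf'           (via the suffix map)
+--~ resolvers.var_of_format_or_suffix("foo.mp") -- 'MPINPUTS'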
---~ function logs.tex.start_page_number()
---~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
---~ if real > 0 then
---~ if user > 0 then
---~ if sub > 0 then
---~ write(format("[%s.%s.%s",real,user,sub))
---~ else
---~ write(format("[%s.%s",real,user))
---~ end
---~ else
---~ write(format("[%s",real))
---~ end
---~ else
---~ write("[-")
---~ end
---~ end
-
---~ function logs.tex.stop_page_number()
---~ write("]")
---~ end
-local real, user, sub
-function logs.tex.start_page_number()
- real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
-end
+end -- of closure
-function logs.tex.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub)
- else
- logs.report("pages", "flushing realpage %s, userpage %s",real,user)
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-tmp'] = {
+ version = 1.100,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+<p>This module deals with caching data. It sets up the paths and
+implements loaders and savers for tables. It is best to set the
+following variable. When not set, the usual paths will be
+checked. Personally I prefer the (user's) temporary path.</p>
+
+<code>
+TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
+</code>
+
+<p>Currently we do no locking when we write files. This is no real
+problem because most caching involves fonts and the chance of them
+being written at the same time is small. We also need to extend
+luatools with a recache feature.</p>
+--ldx]]--
+
+local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local mkdirs, isdir = dir.mkdirs, lfs.isdir
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
+
+local report_cache = logs.new("cache")
+
+local report_resolvers = logs.new("resolvers")
+
+caches = caches or { }
+
+caches.base = caches.base or "luatex-cache"
+caches.more = caches.more or "context"
+caches.direct = false -- true is faster but may need huge amounts of memory
+caches.tree = false
+caches.force = true
+caches.ask = false
+caches.defaults = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
+
+local writable, readables, usedreadables = nil, { }, { }
+
+-- we could use a metatable for writable and readable but not yet
+
+local function identify()
+ -- Combining the loops makes it messy. First we check the format cache path
+ -- and when the last component is not present we try to create it.
+ local texmfcaches = resolvers.clean_path_list("TEXMFCACHE")
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath = texmfcaches[k]
+ if cachepath ~= "" then
+ cachepath = resolvers.clean_path(cachepath)
+ cachepath = file.collapse_path(cachepath)
+ local valid = isdir(cachepath)
+ if valid then
+ if file.isreadable(cachepath) then
+ readables[#readables+1] = cachepath
+ if not writable and file.iswritable(cachepath) then
+ writable = cachepath
+ end
+ end
+ elseif not writable and caches.force then
+ local cacheparent = file.dirname(cachepath)
+ if file.iswritable(cacheparent) then
+ if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
+ mkdirs(cachepath)
+ if isdir(cachepath) and file.iswritable(cachepath) then
+ report_cache("created: %s",cachepath)
+ writable = cachepath
+ readables[#readables+1] = cachepath
+ end
+ end
+ end
+ end
end
- else
- logs.report("pages", "flushing realpage %s",real)
+ end
+ end
+ -- As a last resort we check some temporary paths but this time we don't
+ -- create them.
+ local texmfcaches = caches.defaults
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath = texmfcaches[k]
+ cachepath = resolvers.getenv(cachepath)
+ if cachepath ~= "" then
+ cachepath = resolvers.clean_path(cachepath)
+ local valid = isdir(cachepath)
+ if valid and file.isreadable(cachepath) then
+ if not writable and file.iswritable(cachepath) then
+ readables[#readables+1] = cachepath
+ writable = cachepath
+ break
+ end
+ end
+ end
+ end
+ end
+ -- Some extra checking. If we have no writable or readable path then we simply
+ -- quit.
+ if not writable then
+ report_cache("fatal error: there is no valid writable cache path defined")
+ os.exit()
+ elseif #readables == 0 then
+ report_cache("fatal error: there is no valid readable cache path defined")
+ os.exit()
+ end
+ -- why here
+ writable = dir.expand_name(resolvers.clean_path(writable)) -- just in case
+ -- moved here
+ local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree
+ if tree then
+ caches.tree = tree
+ writable = mkdirs(writable,base,more,tree)
+ for i=1,#readables do
+ readables[i] = file.join(readables[i],base,more,tree)
end
else
- logs.report("pages", "flushing page")
+ writable = mkdirs(writable,base,more)
+ for i=1,#readables do
+ readables[i] = file.join(readables[i],base,more)
+ end
end
- io.flush()
+ -- end
+ if trace_cache then
+ for i=1,#readables do
+ report_cache("using readable path '%s' (order %s)",readables[i],i)
+ end
+ report_cache("using writable path '%s'",writable)
+ end
+ identify = function()
+ return writable, readables
+ end
+ return writable, readables
end
-logs.tex.report_job_stat = statistics.show_job_stat
-
--- xml logging
-
-function logs.xml.report(category,fmt,...) -- new
- if fmt then
- write_nl(format("<r category='%s'>%s</r>",category,format(fmt,...)))
+function caches.usedpaths()
+ local writable, readables = identify()
+ if #readables > 1 then
+ local result = { }
+ for i=1,#readables do
+ local readable = readables[i]
+ if usedreadables[i] or readable == writable then
+ result[#result+1] = format("readable: '%s' (order %s)",readable,i)
+ end
+ end
+ result[#result+1] = format("writable: '%s'",writable)
+ return result
else
- write_nl(format("<r category='%s'/>",category))
+ return writable
end
end
-function logs.xml.line(fmt,...) -- new
- if fmt then
- write_nl(format("<r>%s</r>",format(fmt,...)))
+
+function caches.configfiles()
+ return table.concat(resolvers.instance.specification,";")
+end
+
+function caches.hashed(tree)
+ return md5.hex(gsub(lower(tree),"[\\\/]+","/"))
+end
+
+function caches.treehash()
+ local tree = caches.configfiles()
+ if not tree or tree == "" then
+ return false
else
- write_nl("<r/>")
+ return caches.hashed(tree)
end
end
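+
+-- A rough sketch (made-up value): the tree hash is just the md5 hex of the
+-- normalized configuration specification, so
+--
+--~ caches.hashed([[C:\TEX\TEXMF-LOCAL]])
+--
+-- lowercases the string, collapses the (back)slashes and returns a 32
+-- character hex string that is then used as a subdirectory name.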
-function logs.xml.start() if logs.level > 0 then tw("<%s>" ) end end
-function logs.xml.stop () if logs.level > 0 then tw("</%s>") end end
-function logs.xml.push () if logs.level > 0 then tw("<!-- ") end end
-function logs.xml.pop () if logs.level > 0 then tw(" -->" ) end end
+local r_cache, w_cache = { }, { } -- normally w is in r but who cares
-function logs.xml.start_run()
- write_nl("<?xml version='1.0' standalone='yes'?>")
- write_nl("<job>") -- xmlns='www.pragma-ade.com/luatex/schemas/context-job.rng'
- write_nl("")
+local function getreadablepaths(...) -- we can optimize this as we have at most 2 tags
+ local tags = { ... }
+ local hash = concat(tags,"/")
+ local done = r_cache[hash]
+ if not done then
+ local writable, readables = identify() -- exit if not found
+ if #tags > 0 then
+ done = { }
+ for i=1,#readables do
+ done[i] = file.join(readables[i],...)
+ end
+ else
+ done = readables
+ end
+ r_cache[hash] = done
+ end
+ return done
end
-function logs.xml.stop_run()
- write_nl("</job>")
+local function getwritablepath(...)
+ local tags = { ... }
+ local hash = concat(tags,"/")
+ local done = w_cache[hash]
+ if not done then
+ local writable, readables = identify() -- exit if not found
+ if #tags > 0 then
+ done = mkdirs(writable,...)
+ else
+ done = writable
+ end
+ w_cache[hash] = done
+ end
+ return done
end
-function logs.xml.start_page_number()
- write_nl(format("<p real='%s' page='%s' sub='%s'", texcount.realpageno, texcount.userpageno, texcount.subpageno))
-end
+caches.getreadablepaths = getreadablepaths
+caches.getwritablepath = getwritablepath
-function logs.xml.stop_page_number()
- write("/>")
- write_nl("")
+function caches.getfirstreadablefile(filename,...)
+ local rd = getreadablepaths(...)
+ for i=1,#rd do
+ local path = rd[i]
+ local fullname = file.join(path,filename)
+ if file.isreadable(fullname) then
+ usedreadables[i] = true
+ return fullname, path
+ end
+ end
+ return caches.setfirstwritablefile(filename,...)
end
-function logs.xml.report_output_pages(p,b)
- write_nl(format("<v k='pages' v='%s'/>", p))
- write_nl(format("<v k='bytes' v='%s'/>", b))
- write_nl("")
+function caches.setfirstwritablefile(filename,...)
+ local wr = getwritablepath(...)
+ local fullname = file.join(wr,filename)
+ return fullname, wr
end
-function logs.xml.report_output_log()
+function caches.define(category,subcategory) -- for old times sake
+ return function()
+ return getwritablepath(category,subcategory)
+ end
end
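+
+-- A usage sketch (hypothetical category names): the returned closure resolves
+-- the writable cache path on demand, for instance
+--
+--~ local getpath = caches.define("fonts","data")
+--~ local path    = getpath() -- e.g. .../luatex-cache/context/<hash>/fonts/data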
-function logs.xml.report_tex_stat(k,v)
- texiowrite_nl("log","<v k='"..k.."'>"..tostring(v).."</v>")
+function caches.setluanames(path,name)
+ return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
end
-local level = 0
-
-function logs.xml.show_open(name)
- level = level + 1
- texiowrite_nl(format("<f l='%s' n='%s'>",level,name))
+function caches.loaddata(readables,name)
+ if type(readables) == "string" then
+ readables = { readables }
+ end
+ for i=1,#readables do
+ local path = readables[i]
+ local tmaname, tmcname = caches.setluanames(path,name)
+ local loader = loadfile(tmcname) or loadfile(tmaname)
+ if loader then
+ loader = loader()
+ collectgarbage("step")
+ return loader
+ end
+ end
+ return false
end
-function logs.xml.show_close(name)
- texiowrite("</f> ")
- level = level - 1
+function caches.iswritable(filepath,filename)
+ local tmaname, tmcname = caches.setluanames(filepath,filename)
+ return file.iswritable(tmaname)
end
-function logs.xml.show_load(name)
- texiowrite_nl(format("<f l='%s' n='%s'/>",level+1,name))
+function caches.savedata(filepath,filename,data,raw)
+ local tmaname, tmcname = caches.setluanames(filepath,filename)
+ local reduce, simplify = true, true
+ if raw then
+ reduce, simplify = false, false
+ end
+ data.cache_uuid = os.uuid()
+ if caches.direct then
+ file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex
+ else
+ table.tofile(tmaname, data,'return',false,true,false) -- maybe not the last true
+ end
+ local cleanup = resolvers.boolean_variable("PURGECACHE", false)
+ local strip = resolvers.boolean_variable("LUACSTRIP", true)
+ utils.lua.compile(tmaname, tmcname, cleanup, strip)
end
---
+-- moved from data-res:
-local name, banner = 'report', 'context'
+local content_state = { }
-local function report(category,fmt,...)
- if fmt then
- write_nl(format("%s | %s: %s",name,category,format(fmt,...)))
- elseif category then
- write_nl(format("%s | %s",name,category))
- else
- write_nl(format("%s |",name))
- end
+function caches.contentstate()
+ return content_state or { }
end
-local function simple(fmt,...)
- if fmt then
- write_nl(format("%s | %s",name,format(fmt,...)))
- else
- write_nl(format("%s |",name))
+function caches.loadcontent(cachename,dataname)
+ local name = caches.hashed(cachename)
+ local full, path = caches.getfirstreadablefile(name ..".lua","trees")
+ local filename = file.join(path,name)
+ local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
+ if blob then
+ local data = blob()
+ if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then
+ content_state[#content_state+1] = data.uuid
+ if trace_locating then
+ report_resolvers("loading '%s' for '%s' from '%s'",dataname,cachename,filename)
+ end
+ return data.content
+ elseif trace_locating then
+ report_resolvers("skipping '%s' for '%s' from '%s'",dataname,cachename,filename)
+ end
+ elseif trace_locating then
+ report_resolvers("skipping '%s' for '%s' from '%s'",dataname,cachename,filename)
end
end
-function logs.setprogram(_name_,_banner_,_verbose_)
- name, banner = _name_, _banner_
- if _verbose_ then
- trackers.enable("resolvers.locating")
- end
- logs.set_method("tex")
- logs.report = report -- also used in libraries
- logs.simple = simple -- only used in scripts !
- if utils then
- utils.report = simple
+function caches.collapsecontent(content)
+ for k, v in next, content do
+ if type(v) == "table" and #v == 1 then
+ content[k] = v[1]
+ end
end
- logs.verbose = _verbose_
end
-function logs.setverbose(what)
- if what then
- trackers.enable("resolvers.locating")
- else
- trackers.disable("resolvers.locating")
+function caches.savecontent(cachename,dataname,content)
+ local name = caches.hashed(cachename)
+ local full, path = caches.setfirstwritablefile(name ..".lua","trees")
+ local filename = file.join(path,name) -- is full
+ local luaname, lucname = filename .. ".lua", filename .. ".luc"
+ if trace_locating then
+ report_resolvers("preparing '%s' for '%s'",dataname,cachename)
+ end
+ local data = {
+ type = dataname,
+ root = cachename,
+ version = resolvers.cacheversion,
+ date = os.date("%Y-%m-%d"),
+ time = os.date("%H:%M:%S"),
+ content = content,
+ uuid = os.uuid(),
+ }
+ local ok = io.savedata(luaname,table.serialize(data,true))
+ if ok then
+ if trace_locating then
+ report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
+ end
+ if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip
+ if trace_locating then
+ report_resolvers("'%s' compiled to '%s'",dataname,lucname)
+ end
+ return true
+ else
+ if trace_locating then
+ report_resolvers("compiling failed for '%s', deleting file '%s'",dataname,lucname)
+ end
+ os.remove(lucname)
+ end
+ elseif trace_locating then
+ report_resolvers("unable to save '%s' in '%s' (access error)",dataname,luaname)
end
- logs.verbose = what or false
end
-function logs.extendbanner(_banner_,_verbose_)
- banner = banner .. " | ".. _banner_
- if _verbose_ ~= nil then
- logs.setverbose(what)
- end
-end
-logs.verbose = false
-logs.report = logs.tex.report
-logs.simple = logs.tex.report
-function logs.reportlines(str) -- todo: <lines></lines>
- for line in gmatch(str,"(.-)[\n\r]") do
- logs.report(line)
- end
-end
-function logs.reportline() -- for scripts too
- logs.report()
-end
+end -- of closure
-logs.simpleline = logs.reportline
+do -- create closure to overcome 200 locals limit
-function logs.reportbanner() -- for scripts too
- logs.report(banner)
-end
+if not modules then modules = { } end modules ['data-met'] = {
+ version = 1.100,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
-function logs.help(message,option)
- logs.reportbanner()
- logs.reportline()
- logs.reportlines(message)
- local moreinfo = logs.moreinfo or ""
- if moreinfo ~= "" and option ~= "nomoreinfo" then
- logs.reportline()
- logs.reportlines(moreinfo)
+local find = string.find
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+resolvers.locators = { notfound = { nil } } -- locate databases
+resolvers.hashers = { notfound = { nil } } -- load databases
+resolvers.generators = { notfound = { nil } } -- generate databases
+
+function resolvers.splitmethod(filename)
+ if not filename then
+ return { } -- safeguard
+ elseif type(filename) == "table" then
+ return filename -- already split
+ elseif not find(filename,"://") then
+ return { scheme="file", path = filename, original = filename } -- quick hack
+ else
+ return url.hashed(filename)
end
end
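+
+-- A quick sketch of the split (illustrative name): plain filenames fall back
+-- to the 'file' scheme, anything with '://' goes through url.hashed, so
+--
+--~ resolvers.splitmethod("somefile.tex")
+--
+-- is expected to give { scheme = "file", path = "somefile.tex", original = "somefile.tex" }.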
-logs.set_level('error')
-logs.set_method('tex')
-
-function logs.system(whereto,process,jobname,category,...)
- for i=1,10 do
- local f = io.open(whereto,"a")
- if f then
- f:write(format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...)))
- f:close()
- break
- else
- sleep(0.1)
+function resolvers.methodhandler(what, filename, filetype) -- ...
+ filename = file.collapse_path(filename)
+ local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
+ local scheme = specification.scheme
+ local resolver = resolvers[what]
+ if resolver[scheme] then
+ if trace_locating then
+ report_resolvers("handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
end
+ return resolver[scheme](filename,filetype)
+ else
+ return resolver.tex(filename,filetype) -- todo: specification
end
end
---~ local syslogname = "oeps.xxx"
---~
---~ for i=1,10 do
---~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
---~ end
-
-function logs.fatal(where,...)
- logs.report(where,"fatal error: %s, aborting now",format(...))
- os.exit()
-end
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-inp'] = {
+if not modules then modules = { } end modules ['data-res'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -8111,70 +9499,45 @@ if not modules then modules = { } end modules ['data-inp'] = {
license = "see context related readme files",
}
--- After a few years using the code the large luat-inp.lua file
--- has been split up a bit. In the process some functionality was
--- dropped:
---
--- * support for reading lsr files
--- * selective scanning (subtrees)
--- * some public auxiliary functions were made private
---
--- TODO: os.getenv -> os.env[]
--- TODO: instances.[hashes,cnffiles,configurations,522]
--- TODO: check escaping in find etc, too much, too slow
-
--- This lib is multi-purpose and can be loaded again later on so that
--- additional functionality becomes available. We will split this
--- module in components once we're done with prototyping. This is the
--- first code I wrote for LuaTeX, so it needs some cleanup. Before changing
--- something in this module one can best check with Taco or Hans first; there
--- is some nasty trickery going on that relates to traditional kpse support.
-
--- To be considered: hash key lowercase, first entry in table filename
--- (any case), rest paths (so no need for optimization). Or maybe a
--- separate table that matches lowercase names to mixed case when
--- present. In that case the lower() cases can go away. I will do that
--- only when we run into problems with names ... well ... Iwona-Regular.
+-- In practice we will work within one tds tree, but I want to keep
+-- the option open to build tools that look at multiple trees, which is
+-- why we keep the tree specific data in a table. We used to pass the
+-- instance but for practical purposes we now avoid this and use an
+-- instance variable. We always have one instance active (sort of global).
--- Beware, loading and saving is overloaded in luat-tmp!
+-- todo: cache:/// home:///
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
local next, type = next, type
-local lpegmatch = lpeg.match
-local trace_locating, trace_detail, trace_expansions = false, false, false
+local lpegP, lpegS, lpegR, lpegC, lpegCc, lpegCs, lpegCt = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-trackers.register("resolvers.locating", function(v) trace_locating = v end)
-trackers.register("resolvers.details", function(v) trace_detail = v end)
-trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo
+local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
+local collapse_path = file.collapse_path
-if not resolvers then
- resolvers = {
- suffixes = { },
- formats = { },
- dangerous = { },
- suffixmap = { },
- alternatives = { },
- locators = { }, -- locate databases
- hashers = { }, -- load databases
- generators = { }, -- generate databases
- }
-end
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
+
+local report_resolvers = logs.new("resolvers")
-local resolvers = resolvers
+local expanded_path_from_list = resolvers.expanded_path_from_list
+local checked_variable = resolvers.checked_variable
+local split_configuration_path = resolvers.split_configuration_path
-resolvers.locators .notfound = { nil }
-resolvers.hashers .notfound = { nil }
-resolvers.generators.notfound = { nil }
+local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.setenv, os.getenv
resolvers.cacheversion = '1.0.1'
-resolvers.cnfname = 'texmf.cnf'
-resolvers.luaname = 'texmfcnf.lua'
-resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~'
-resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}'
+resolvers.configbanner = ''
+resolvers.homedir = environment.homedir
+resolvers.criticalvars = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
+resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}' -- rubbish path
+resolvers.luacnfname = 'texmfcnf.lua'
+resolvers.luacnfstate = "unknown"
-local dummy_path_expr = "^!*unset/*$"
+local unset_variable = "unset"
local formats = resolvers.formats
local suffixes = resolvers.suffixes
@@ -8182,104 +9545,12 @@ local dangerous = resolvers.dangerous
local suffixmap = resolvers.suffixmap
local alternatives = resolvers.alternatives
-formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' }
-formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' }
-formats['fmt'] = 'TEXFORMATS' suffixes['fmt'] = { 'fmt' }
-formats['map'] = 'TEXFONTMAPS' suffixes['map'] = { 'map' }
-formats['mp'] = 'MPINPUTS' suffixes['mp'] = { 'mp' }
-formats['ocp'] = 'OCPINPUTS' suffixes['ocp'] = { 'ocp' }
-formats['ofm'] = 'OFMFONTS' suffixes['ofm'] = { 'ofm', 'tfm' }
-formats['otf'] = 'OPENTYPEFONTS' suffixes['otf'] = { 'otf' } -- 'ttf'
-formats['opl'] = 'OPLFONTS' suffixes['opl'] = { 'opl' }
-formats['otp'] = 'OTPINPUTS' suffixes['otp'] = { 'otp' }
-formats['ovf'] = 'OVFFONTS' suffixes['ovf'] = { 'ovf', 'vf' }
-formats['ovp'] = 'OVPFONTS' suffixes['ovp'] = { 'ovp' }
-formats['tex'] = 'TEXINPUTS' suffixes['tex'] = { 'tex' }
-formats['tfm'] = 'TFMFONTS' suffixes['tfm'] = { 'tfm' }
-formats['ttf'] = 'TTFONTS' suffixes['ttf'] = { 'ttf', 'ttc', 'dfont' }
-formats['pfb'] = 'T1FONTS' suffixes['pfb'] = { 'pfb', 'pfa' }
-formats['vf'] = 'VFFONTS' suffixes['vf'] = { 'vf' }
-
-formats['fea'] = 'FONTFEATURES' suffixes['fea'] = { 'fea' }
-formats['cid'] = 'FONTCIDMAPS' suffixes['cid'] = { 'cid', 'cidmap' }
-
-formats ['texmfscripts'] = 'TEXMFSCRIPTS' -- new
-suffixes['texmfscripts'] = { 'rb', 'pl', 'py' } -- 'lua'
-
-formats ['lua'] = 'LUAINPUTS' -- new
-suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' }
-
--- backward compatible ones
-
-alternatives['map files'] = 'map'
-alternatives['enc files'] = 'enc'
-alternatives['cid maps'] = 'cid' -- great, why no cid files
-alternatives['font feature files'] = 'fea' -- and fea files here
-alternatives['opentype fonts'] = 'otf'
-alternatives['truetype fonts'] = 'ttf'
-alternatives['truetype collections'] = 'ttc'
-alternatives['truetype dictionary'] = 'dfont'
-alternatives['type1 fonts'] = 'pfb'
-
--- obscure ones
-
-formats ['misc fonts'] = ''
-suffixes['misc fonts'] = { }
-
-formats ['sfd'] = 'SFDFONTS'
-suffixes ['sfd'] = { 'sfd' }
-alternatives['subfont definition files'] = 'sfd'
-
--- lib paths
-
-formats ['lib'] = 'CLUAINPUTS' -- new (needs checking)
-suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' }
-
--- In practice we will work within one tds tree, but i want to keep
--- the option open to build tools that look at multiple trees, which is
--- why we keep the tree specific data in a table. We used to pass the
--- instance but for practical pusposes we now avoid this and use a
--- instance variable.
-
--- here we catch a few new thingies (todo: add these paths to context.tmf)
---
--- FONTFEATURES = .;$TEXMF/fonts/fea//
--- FONTCIDMAPS = .;$TEXMF/fonts/cid//
-
--- we always have one instance active
-
resolvers.instance = resolvers.instance or nil -- the current one (slow access)
-local instance = resolvers.instance or nil -- the current one (fast access)
+local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.newinstance()
- -- store once, freeze and faster (once reset we can best use
- -- instance.environment) maybe better have a register suffix
- -- function
-
- for k, v in next, suffixes do
- for i=1,#v do
- local vi = v[i]
- if vi then
- suffixmap[vi] = k
- end
- end
- end
-
- -- because vf searching is somewhat dangerous, we want to prevent
- -- too liberal searching esp because we do a lookup on the current
- -- path anyway; only tex (or any) is safe
-
- for k, v in next, formats do
- dangerous[k] = true
- end
- dangerous.tex = nil
-
- -- the instance
-
local newinstance = {
- rootpath = '',
- treepath = '',
progname = 'context',
engine = 'luatex',
format = '',
@@ -8287,26 +9558,19 @@ function resolvers.newinstance()
variables = { },
expansions = { },
files = { },
- remap = { },
- configuration = { },
- setup = { },
+ setups = { },
order = { },
found = { },
foundintrees = { },
- kpsevars = { },
+ origins = { },
hashes = { },
- cnffiles = { },
- luafiles = { },
+ specification = { },
lists = { },
remember = true,
diskcache = true,
renewcache = false,
- scandisk = true,
- cachepath = nil,
loaderror = false,
- sortdata = false,
savelists = true,
- cleanuppaths = true,
allresults = false,
pattern = nil, -- lists
data = { }, -- only for loading
@@ -8316,8 +9580,8 @@ function resolvers.newinstance()
local ne = newinstance.environment
- for k,v in next, os.env do
- ne[k] = resolvers.bare_variable(v)
+ for k, v in next, osenv do
+ ne[upper(k)] = checked_variable(v)
end
return newinstance
@@ -8339,91 +9603,68 @@ local function reset_hashes()
instance.found = { }
end
-local function check_configuration() -- not yet ok, no time for debugging now
- local ie, iv = instance.environment, instance.variables
- local function fix(varname,default)
- local proname = varname .. "." .. instance.progname or "crap"
- local p, v = ie[proname], ie[varname] or iv[varname]
- if not ((p and p ~= "") or (v and v ~= "")) then
- iv[varname] = default -- or environment?
- end
- end
- local name = os.name
- if name == "windows" then
- fix("OSFONTDIR", "c:/windows/fonts//")
- elseif name == "macosx" then
- fix("OSFONTDIR", "$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
- else
- -- bad luck
- end
- fix("LUAINPUTS" , ".;$TEXINPUTS;$TEXMFSCRIPTS") -- no progname, hm
- -- this will go away some day
- fix("FONTFEATURES", ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
- fix("FONTCIDMAPS" , ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
- --
- fix("LUATEXLIBS" , ".;$TEXMF/luatex/lua//")
-end
-
-function resolvers.bare_variable(str) -- assumes str is a string
- return (gsub(str,"\s*([\"\']?)(.+)%1\s*", "%2"))
-end
-
-function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
- if n then
- trackers.disable("resolvers.*")
- trackers.enable("resolvers."..n)
+function resolvers.setenv(key,value)
+ if instance then
+ instance.environment[key] = value
+ ossetenv(key,value)
end
end
-resolvers.settrace(os.getenv("MTX_INPUT_TRACE"))
-
-function resolvers.osenv(key)
- local ie = instance.environment
- local value = ie[key]
- if value == nil then
- -- local e = os.getenv(key)
- local e = os.env[key]
- if e == nil then
- -- value = "" -- false
- else
- value = resolvers.bare_variable(e)
- end
- ie[key] = value
+function resolvers.getenv(key)
+ local value = instance.environment[key]
+ if value and value ~= "" then
+ return value
+ else
+ local e = osgetenv(key)
+ return e ~= nil and e ~= "" and checked_variable(e) or ""
end
- return value or ""
-end
-
-function resolvers.env(key)
- return instance.environment[key] or resolvers.osenv(key)
end
---
+resolvers.env = resolvers.getenv
local function expand_vars(lst) -- simple vars
- local variables, env = instance.variables, resolvers.env
+ local variables, getenv = instance.variables, resolvers.getenv
local function resolve(a)
- return variables[a] or env(a)
+ local va = variables[a] or ""
+ return (va ~= "" and va) or getenv(a) or ""
end
for k=1,#lst do
- lst[k] = gsub(lst[k],"%$([%a%d%_%-]+)",resolve)
+ local var = lst[k]
+ var = gsub(var,"%$([%a%d%_%-]+)",resolve)
+ var = gsub(var,";+",";")
+ var = gsub(var,";[!{}/\\]+;",";")
+ lst[k] = var
end
end
-local function expanded_var(var) -- simple vars
- local function resolve(a)
- return instance.variables[a] or resolvers.env(a)
+local function resolve(key)
+ local value = instance.variables[key]
+ if value and value ~= "" then
+ return value
+ end
+ local value = instance.environment[key]
+ if value and value ~= "" then
+ return value
end
- return (gsub(var,"%$([%a%d%_%-]+)",resolve))
+ local e = osgetenv(key)
+ return e ~= nil and e ~= "" and checked_variable(e) or ""
+end
+
+local function expanded_var(var) -- simple vars
+ var = gsub(var,"%$([%a%d%_%-]+)",resolve)
+ var = gsub(var,";+",";")
+ var = gsub(var,";[!{}/\\]+;",";")
+ return var
end
local function entry(entries,name)
- if name and (name ~= "") then
+ if name and name ~= "" then
name = gsub(name,'%$','')
local result = entries[name..'.'..instance.progname] or entries[name]
if result then
return result
else
- result = resolvers.env(name)
+ result = resolvers.getenv(name)
if result then
instance.variables[name] = result
resolvers.expand_variables()
@@ -8443,438 +9684,147 @@ local function is_entry(entries,name)
end
end
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
--- this one is better and faster, but it took me a while to realize
--- that this kind of replacement is cleaner than messy parsing and
--- fuzzy concatenating we can probably gain a bit with selectively
--- applying lpeg, but experiments with lpeg parsing this proved not to
--- work that well; the parsing is ok, but dealing with the resulting
--- table is a pain because we need to work inside-out recursively
-
-local function do_first(a,b)
- local t = { }
- for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_second(a,b)
- local t = { }
- for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_both(a,b)
- local t = { }
- for sa in gmatch(a,"[^,]+") do
- for sb in gmatch(b,"[^,]+") do
- t[#t+1] = sa .. sb
- end
- end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_three(a,b,c)
- return a .. b.. c
-end
-
-local function splitpathexpr(str, t, validate)
- -- no need for further optimization as it is only called a
- -- few times, we can use lpeg for the sub
- if trace_expansions then
- logs.report("fileio","expanding variable '%s'",str)
- end
- t = t or { }
- str = gsub(str,",}",",@}")
- str = gsub(str,"{,","{@,")
- -- str = "@" .. str .. "@"
- local ok, done
- while true do
- done = false
- while true do
- str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
- if ok > 0 then done = true else break end
- end
- str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
- if ok > 0 then done = true end
- if not done then break end
- end
- str = gsub(str,"[{}]", "")
- str = gsub(str,"@","")
- if validate then
- for s in gmatch(str,"[^,]+") do
- s = validate(s)
- if s then t[#t+1] = s end
- end
- else
- for s in gmatch(str,"[^,]+") do
- t[#t+1] = s
- end
- end
- if trace_expansions then
- for k=1,#t do
- logs.report("fileio","% 4i: %s",k,t[k])
- end
- end
- return t
-end
-
-local function expanded_path_from_list(pathlist) -- maybe not a list, just a path
- -- a previous version fed back into pathlist
- local newlist, ok = { }, false
- for k=1,#pathlist do
- if find(pathlist[k],"[{}]") then
- ok = true
- break
- end
- end
- if ok then
- local function validate(s)
- s = file.collapse_path(s)
- return s ~= "" and not find(s,dummy_path_expr) and s
- end
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- else
- for k=1,#pathlist do
- for p in gmatch(pathlist[k],"([^,]+)") do
- p = file.collapse_path(p)
- if p ~= "" then newlist[#newlist+1] = p end
- end
- end
- end
- return newlist
-end
-
--- we follow a rather traditional approach:
---
--- (1) texmf.cnf given in TEXMFCNF
--- (2) texmf.cnf searched in default variable
---
--- also we now follow the stupid route: if not set then just assume *one*
--- cnf file under texmf (i.e. distribution)
-
-local args = environment and environment.original_arguments or arg -- this needs a cleanup
-
-resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
-resolvers.ownbin = gsub(resolvers.ownbin,"\\","/")
-
-function resolvers.getownpath()
- local ownpath = resolvers.ownpath or os.selfdir
- if not ownpath or ownpath == "" or ownpath == "unset" then
- ownpath = args[-1] or arg[-1]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- if not ownpath or ownpath == "" then
- ownpath = args[-0] or arg[-0]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- end
- local binary = resolvers.ownbin
- if not ownpath or ownpath == "" then
- ownpath = ownpath and file.dirname(binary)
- end
- if not ownpath or ownpath == "" then
- if os.binsuffix ~= "" then
- binary = file.replacesuffix(binary,os.binsuffix)
- end
- for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
- local b = file.join(p,binary)
- if lfs.isfile(b) then
- -- we assume that after changing to the path the currentdir function
- -- resolves to the real location and use this side effect here; this
- -- trick is needed because on the mac installations use symlinks in the
- -- path instead of real locations
- local olddir = lfs.currentdir()
- if lfs.chdir(p) then
- local pp = lfs.currentdir()
- if trace_locating and p ~= pp then
- logs.report("fileio","following symlink '%s' to '%s'",p,pp)
- end
- ownpath = pp
- lfs.chdir(olddir)
- else
- if trace_locating then
- logs.report("fileio","unable to check path '%s'",p)
- end
- ownpath = p
- end
- break
- end
- end
- end
- if not ownpath or ownpath == "" then
- ownpath = "."
- logs.report("fileio","forcing fallback ownpath .")
- elseif trace_locating then
- logs.report("fileio","using ownpath '%s'",ownpath)
- end
- end
- resolvers.ownpath = ownpath
- function resolvers.getownpath()
- return resolvers.ownpath
- end
- return ownpath
-end
-
-local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" }
-
-local function identify_own()
- local ownpath = resolvers.getownpath() or dir.current()
- local ie = instance.environment
- if ownpath then
- if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end
- if resolvers.env('SELFAUTODIR') == "" then os.env['SELFAUTODIR'] = file.collapse_path(ownpath .. "/..") end
- if resolvers.env('SELFAUTOPARENT') == "" then os.env['SELFAUTOPARENT'] = file.collapse_path(ownpath .. "/../..") end
- else
- logs.report("fileio","error: unable to locate ownpath")
- os.exit()
- end
- if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end
- if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end
- if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end
+function resolvers.report_critical_variables()
if trace_locating then
- for i=1,#own_places do
- local v = own_places[i]
- logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown")
+ for i=1,#resolvers.criticalvars do
+ local v = resolvers.criticalvars[i]
+ report_resolvers("variable '%s' set to '%s'",v,resolvers.getenv(v) or "unknown")
end
+ report_resolvers()
end
- identify_own = function() end
+ resolvers.report_critical_variables = function() end
end
-function resolvers.identify_cnf()
- if #instance.cnffiles == 0 then
- -- fallback
- identify_own()
- -- the real search
- resolvers.expand_variables()
- local t = resolvers.split_path(resolvers.env('TEXMFCNF'))
- t = expanded_path_from_list(t)
- expand_vars(t) -- redundant
- local function locate(filename,list)
- for i=1,#t do
- local ti = t[i]
- local texmfcnf = file.collapse_path(file.join(ti,filename))
- if lfs.isfile(texmfcnf) then
- list[#list+1] = texmfcnf
- end
- end
- end
- locate(resolvers.luaname,instance.luafiles)
- locate(resolvers.cnfname,instance.cnffiles)
- end
-end
-
-local function load_cnf_file(fname)
- fname = resolvers.clean_path(fname)
- local lname = file.replacesuffix(fname,'lua')
- if lfs.isfile(lname) then
- local dname = file.dirname(fname) -- fname ?
- if not instance.configuration[dname] then
- resolvers.load_data(dname,'configuration',lname and file.basename(lname))
- instance.order[#instance.order+1] = instance.configuration[dname]
+local function identify_configuration_files()
+ local specification = instance.specification
+ if #specification == 0 then
+ local cnfspec = resolvers.getenv('TEXMFCNF')
+ if cnfspec == "" then
+ cnfspec = resolvers.luacnfspec
+ resolvers.luacnfstate = "default"
+ else
+ resolvers.luacnfstate = "environment"
end
- else
- f = io.open(fname)
- if f then
- if trace_locating then
- logs.report("fileio","loading configuration file %s", fname)
- end
- local line, data, n, k, v
- local dname = file.dirname(fname)
- if not instance.configuration[dname] then
- instance.configuration[dname] = { }
- instance.order[#instance.order+1] = instance.configuration[dname]
- end
- local data = instance.configuration[dname]
- while true do
- local line, n = f:read(), 0
- if line then
- while true do -- join lines
- line, n = gsub(line,"\\%s*$", "")
- if n > 0 then
- line = line .. f:read()
- else
- break
+ resolvers.report_critical_variables()
+ resolvers.expand_variables()
+ local cnfpaths = expanded_path_from_list(resolvers.split_path(cnfspec))
+ expand_vars(cnfpaths) --- hm
+ local luacnfname = resolvers.luacnfname
+ for i=1,#cnfpaths do
+ local filename = collapse_path(filejoin(cnfpaths[i],luacnfname))
+ if lfs.isfile(filename) then
+ specification[#specification+1] = filename
+ end
+ end
+ end
+end
+
+local function load_configuration_files()
+ local specification = instance.specification
+ if #specification > 0 then
+ local luacnfname = resolvers.luacnfname
+ for i=1,#specification do
+ local filename = specification[i]
+ local pathname = filedirname(filename)
+ local filename = filejoin(pathname,luacnfname)
+ local blob = loadfile(filename)
+ if blob then
+ local data = blob()
+ data = data and data.content
+ local setups = instance.setups
+ if data then
+ if trace_locating then
+ report_resolvers("loading configuration file '%s'",filename)
+ report_resolvers()
+ end
+ -- flattening is easier to deal with as we need to collapse
+ local t = { }
+ for k, v in next, data do -- v = progname
+ if v ~= unset_variable then
+ local kind = type(v)
+ if kind == "string" then
+ t[k] = v
+ elseif kind == "table" then
+ -- this operates on the table directly
+ setters.initialize(filename,k,v)
+ -- this doesn't (maybe metatables some day)
+ for kk, vv in next, v do -- vv = variable
+ if vv ~= unset_variable then
+ if type(vv) == "string" then
+ t[kk.."."..k] = vv
+ end
+ end
+ end
+ else
+ -- report_resolvers("strange key '%s' in configuration file '%s'",k,filename)
+ end
end
end
- if not find(line,"^[%%#]") then
- local l = gsub(line,"%s*%%.*$","")
- local k, v = match(l,"%s*(.-)%s*=%s*(.-)%s*$")
- if k and v and not data[k] then
- v = gsub(v,"[%%#].*",'')
- data[k] = gsub(v,"~","$HOME")
- instance.kpsevars[k] = true
+ setups[pathname] = t
+
+ if resolvers.luacnfstate == "default" then
+ -- the following code is not tested
+ local cnfspec = t["TEXMFCNF"]
+ if cnfspec then
+ -- we push the value into the main environment (osenv) so
+ -- that it takes precedence over the default one and therefore
+ -- also over following definitions
+ resolvers.setenv('TEXMFCNF',cnfspec)
+ -- we now identify and load the specified configuration files
+ instance.specification = { }
+ identify_configuration_files()
+ load_configuration_files()
+ -- we prevent further overload of the configuration variable
+ resolvers.luacnfstate = "configuration"
+ -- we quit the outer loop
+ break
end
end
+
else
- break
+ if trace_locating then
+ report_resolvers("skipping configuration file '%s'",filename)
+ end
+ setups[pathname] = { }
+ instance.loaderror = true
end
+ elseif trace_locating then
+ report_resolvers("skipping configuration file '%s'",filename)
+ end
+ instance.order[#instance.order+1] = instance.setups[pathname]
+ if instance.loaderror then
+ break
end
- f:close()
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'", fname)
end
+ elseif trace_locating then
+ report_resolvers("warning: no lua configuration files found")
end
end
-local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared)
- local order = instance.order
+local function collapse_configuration_data() -- potential optimization: pass start index (setup and configuration are shared)
+ local order, variables, environment, origins = instance.order, instance.variables, instance.environment, instance.origins
for i=1,#order do
local c = order[i]
for k,v in next, c do
- if not instance.variables[k] then
- if instance.environment[k] then
- instance.variables[k] = instance.environment[k]
+ if variables[k] then
+ -- okay
+ else
+ local ek = environment[k]
+ if ek and ek ~= "" then
+ variables[k], origins[k] = ek, "env"
else
- instance.kpsevars[k] = true
- instance.variables[k] = resolvers.bare_variable(v)
+ local bv = checked_variable(v)
+ variables[k], origins[k] = bv, "cnf"
end
end
end
end
end
-function resolvers.load_cnf()
- local function loadoldconfigdata()
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- load_cnf_file(cnffiles[i])
- end
- end
- -- instance.cnffiles contain complete names now !
- -- we still use a funny mix of cnf and new but soon
- -- we will switch to lua exclusively as we only use
- -- the file to collect the tree roots
- if #instance.cnffiles == 0 then
- if trace_locating then
- logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)")
- end
- else
- local cnffiles = instance.cnffiles
- instance.rootpath = cnffiles[1]
- for k=1,#cnffiles do
- instance.cnffiles[k] = file.collapse_path(cnffiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- if instance.diskcache and not instance.renewcache then
- resolvers.loadoldconfig(instance.cnffiles)
- if instance.loaderror then
- loadoldconfigdata()
- resolvers.saveoldconfig()
- end
- else
- loadoldconfigdata()
- if instance.renewcache then
- resolvers.saveoldconfig()
- end
- end
- collapse_cnf_data()
- end
- check_configuration()
-end
-
-function resolvers.load_lua()
- if #instance.luafiles == 0 then
- -- yet harmless
- else
- instance.rootpath = instance.luafiles[1]
- local luafiles = instance.luafiles
- for k=1,#luafiles do
- instance.luafiles[k] = file.collapse_path(luafiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- resolvers.loadnewconfig()
- collapse_cnf_data()
- end
- check_configuration()
-end
-
-- database loading
-function resolvers.load_hash()
- resolvers.locatelists()
- if instance.diskcache and not instance.renewcache then
- resolvers.loadfiles()
- if instance.loaderror then
- resolvers.loadlists()
- resolvers.savefiles()
- end
- else
- resolvers.loadlists()
- if instance.renewcache then
- resolvers.savefiles()
- end
- end
-end
-
-function resolvers.append_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' appended",tag)
- end
- insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } )
-end
-
-function resolvers.prepend_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' prepended",tag)
- end
- insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } )
-end
-
-function resolvers.extend_texmf_var(specification) -- crap, we could better prepend the hash
--- local t = resolvers.expanded_path_list('TEXMF') -- full expansion
- local t = resolvers.split_path(resolvers.env('TEXMF'))
- insert(t,1,specification)
- local newspec = concat(t,";")
- if instance.environment["TEXMF"] then
- instance.environment["TEXMF"] = newspec
- elseif instance.variables["TEXMF"] then
- instance.variables["TEXMF"] = newspec
- else
- -- weird
- end
- resolvers.expand_variables()
- reset_hashes()
-end
-
-- locators
-function resolvers.locatelists()
- local texmfpaths = resolvers.clean_path_list('TEXMF')
- for i=1,#texmfpaths do
- local path = texmfpaths[i]
- if trace_locating then
- logs.report("fileio","locating list of '%s'",path)
- end
- resolvers.locatedatabase(file.collapse_path(path))
- end
-end
-
function resolvers.locatedatabase(specification)
return resolvers.methodhandler('locators', specification)
end
@@ -8882,11 +9832,11 @@ end
function resolvers.locators.tex(specification)
if specification and specification ~= '' and lfs.isdir(specification) then
if trace_locating then
- logs.report("fileio","tex locator '%s' found",specification)
+ report_resolvers("tex locator '%s' found",specification)
end
- resolvers.append_hash('file',specification,filename)
+ resolvers.append_hash('file',specification,filename,true) -- cache
elseif trace_locating then
- logs.report("fileio","tex locator '%s' not found",specification)
+ report_resolvers("tex locator '%s' not found",specification)
end
end
@@ -8896,9 +9846,8 @@ function resolvers.hashdatabase(tag,name)
return resolvers.methodhandler('hashers',tag,name)
end
-function resolvers.loadfiles()
- instance.loaderror = false
- instance.files = { }
+local function load_file_databases()
+ instance.loaderror, instance.files = false, { }
if not instance.renewcache then
local hashes = instance.hashes
for k=1,#hashes do
@@ -8909,194 +9858,134 @@ function resolvers.loadfiles()
end
end
-function resolvers.hashers.tex(tag,name)
- resolvers.load_data(tag,'files')
-end
-
--- generators:
-
-function resolvers.loadlists()
- local hashes = instance.hashes
- for i=1,#hashes do
- resolvers.generatedatabase(hashes[i].tag)
+function resolvers.hashers.tex(tag,name) -- used where?
+ local content = caches.loadcontent(tag,'files')
+ if content then
+ instance.files[tag] = content
+ else
+ instance.files[tag] = { }
+ instance.loaderror = true
end
end
-function resolvers.generatedatabase(specification)
- return resolvers.methodhandler('generators', specification)
-end
-
--- starting with . or .. etc or funny char
-
-local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-
---~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
---~ local l_confusing = lpeg.P(" ")
---~ local l_character = lpeg.patterns.utf8
---~ local l_dangerous = lpeg.P(".")
-
---~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1)
---~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false)
-
---~ local function test(str)
---~ print(str,lpeg.match(l_normal,str))
---~ end
---~ test("ヒラギノ明朝 Pro W3")
---~ test("..ヒラギノ明朝 Pro W3")
---~ test(":ヒラギノ明朝 Pro W3;")
---~ test("ヒラギノ明朝 /Pro W3;")
---~ test("ヒラギノ明朝 Pro W3")
-
-function resolvers.generators.tex(specification)
- local tag = specification
- if trace_locating then
- logs.report("fileio","scanning path '%s'",specification)
- end
- instance.files[tag] = { }
- local files = instance.files[tag]
- local n, m, r = 0, 0, 0
- local spec = specification .. '/'
- local attributes = lfs.attributes
- local directory = lfs.dir
- local function action(path)
- local full
- if path then
- full = spec .. path .. '/'
- else
- full = spec
- end
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- -- if lpegmatch(l_normal,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- if path then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
+local function locate_file_databases()
+ -- todo: cache:// and tree:// (runtime)
+ local texmfpaths = resolvers.expanded_path_list('TEXMF')
+ for i=1,#texmfpaths do
+ local path = collapse_path(texmfpaths[i])
+ local stripped = gsub(path,"^!!","")
+ local runtime = stripped == path
+ path = resolvers.clean_path(path)
+ if stripped ~= "" then
+ if lfs.isdir(path) then
+ local spec = resolvers.splitmethod(stripped)
+ if spec.scheme == "cache" then
+ stripped = spec.path
+ elseif runtime and (spec.noscheme or spec.scheme == "file") then
+ stripped = "tree:///" .. stripped
+ end
+ if trace_locating then
+ if runtime then
+ report_resolvers("locating list of '%s' (runtime)",path)
+ else
+ report_resolvers("locating list of '%s' (cached)",path)
end
- elseif mode == 'directory' then
- m = m + 1
- if path then
- action(path..'/'..name)
+ end
+ resolvers.locatedatabase(stripped) -- nothing done with result
+ else
+ if trace_locating then
+ if runtime then
+ report_resolvers("skipping list of '%s' (runtime)",path)
else
- action(name)
+ report_resolvers("skipping list of '%s' (cached)",path)
end
end
end
end
end
- action()
if trace_locating then
- logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r)
+ report_resolvers()
end
end
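-- An illustrative sketch of how the TEXMF entries are classified in
-- locate_file_databases above: a leading '!!' means "trust the cached file
-- database", anything else is treated as a runtime tree and rewritten to a
-- tree:// specification before being handed to the locator. The helper name
-- below is made up.
local function classify_texmf_entry(entry)
    local stripped = string.gsub(entry, "^!!", "")
    local runtime  = stripped == entry -- no '!!' prefix present
    if runtime then
        return "tree:///" .. stripped, "runtime (scanned)"
    else
        return stripped, "cached (database)"
    end
end

-- classify_texmf_entry("!!/usr/share/texmf") --> "/usr/share/texmf", "cached (database)"
-- classify_texmf_entry("/home/user/texmf")   --> "tree:////home/user/texmf", "runtime (scanned)"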
--- savers, todo
-
-function resolvers.savefiles()
- resolvers.save_data('files')
+local function generate_file_databases()
+ local hashes = instance.hashes
+ for i=1,#hashes do
+ resolvers.methodhandler('generators',hashes[i].tag)
+ end
+ if trace_locating then
+ report_resolvers()
+ end
end
--- A config (optionally) has the paths split in tables. Internally
--- we join them and split them after the expansion has taken place. This
--- is more convenient.
-
---~ local checkedsplit = string.checkedsplit
-
-local cache = { }
-
-local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;")))
-
-local function split_kpse_path(str) -- beware, this can be either a path or a {specification}
- local found = cache[str]
- if not found then
- if str == "" then
- found = { }
- else
- str = gsub(str,"\\","/")
---~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator)
-local split = lpegmatch(splitter,str)
- found = { }
- for i=1,#split do
- local s = split[i]
- if not find(s,"^{*unset}*") then
- found[#found+1] = s
- end
- end
- if trace_expansions then
- logs.report("fileio","splitting path specification '%s'",str)
- for k=1,#found do
- logs.report("fileio","% 4i: %s",k,found[k])
- end
- end
- cache[str] = found
+local function save_file_databases() -- will become cachers
+ for i=1,#instance.hashes do
+ local hash = instance.hashes[i]
+ local cachename = hash.tag
+ if hash.cache then
+ local content = instance.files[cachename]
+ caches.collapsecontent(content)
+ caches.savecontent(cachename,"files",content)
+ elseif trace_locating then
+ report_resolvers("not saving runtime tree '%s'",cachename)
end
end
- return found
end
-resolvers.split_kpse_path = split_kpse_path
-
-function resolvers.splitconfig()
- for i=1,#instance do
- local c = instance[i]
- for k,v in next, c do
- if type(v) == 'string' then
- local t = split_kpse_path(v)
- if #t > 1 then
- c[k] = t
- end
- end
+local function load_databases()
+ locate_file_databases()
+ if instance.diskcache and not instance.renewcache then
+ load_file_databases()
+ if instance.loaderror then
+ generate_file_databases()
+ save_file_databases()
+ end
+ else
+ generate_file_databases()
+ if instance.renewcache then
+ save_file_databases()
end
end
end
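-- A minimal sketch of the load-or-regenerate flow in load_databases above:
-- the disk cache is tried first, and a fresh scan (followed by a save) is the
-- fallback when loading fails or when the cache is being renewed. The
-- arguments are hypothetical callbacks, not the real resolver functions.
local function load_or_generate(usecache, renewcache, load, generate, save)
    if usecache and not renewcache then
        if not load() then -- corresponds to instance.loaderror above
            generate()
            save()
        end
    else
        generate()
        if renewcache then
            save()
        end
    end
end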
-function resolvers.joinconfig()
- local order = instance.order
- for i=1,#order do
- local c = order[i]
- for k,v in next, c do -- indexed?
- if type(v) == 'table' then
- c[k] = file.join_path(v)
- end
- end
+function resolvers.append_hash(type,tag,name,cache)
+ if trace_locating then
+ report_resolvers("hash '%s' appended",tag)
end
+ insert(instance.hashes, { type = type, tag = tag, name = name, cache = cache } )
end
-function resolvers.split_path(str)
- if type(str) == 'table' then
- return str
- else
- return split_kpse_path(str)
+function resolvers.prepend_hash(type,tag,name,cache)
+ if trace_locating then
+ report_resolvers("hash '%s' prepended",tag)
end
+ insert(instance.hashes, 1, { type = type, tag = tag, name = name, cache = cache } )
end
-function resolvers.join_path(str)
- if type(str) == 'table' then
- return file.join_path(str)
+function resolvers.extend_texmf_var(specification) -- crap, we could better prepend the hash
+-- local t = resolvers.expanded_path_list('TEXMF') -- full expansion
+ local t = resolvers.split_path(resolvers.getenv('TEXMF'))
+ insert(t,1,specification)
+ local newspec = concat(t,";")
+ if instance.environment["TEXMF"] then
+ instance.environment["TEXMF"] = newspec
+ elseif instance.variables["TEXMF"] then
+ instance.variables["TEXMF"] = newspec
else
- return str
+ -- weird
end
+ resolvers.expand_variables()
+ reset_hashes()
+end
+
+function resolvers.generators.tex(specification,tag)
+ instance.files[tag or specification] = resolvers.scan_files(specification)
end
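-- An illustrative sketch of what resolvers.extend_texmf_var above does with
-- the TEXMF specification: the new tree is put in front so that it is
-- searched before the existing ones (the real code goes through split_path
-- and also refreshes expansions and hashes). The helper name and paths are
-- made up.
local function prepend_tree(texmf, tree)
    local t = { tree }
    for item in string.gmatch(texmf, "[^;]+") do
        t[#t+1] = item
    end
    return table.concat(t, ";")
end

-- prepend_tree("/usr/share/texmf;/usr/local/texmf", "/data/project/texmf")
-- --> "/data/project/texmf;/usr/share/texmf;/usr/local/texmf"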
function resolvers.splitexpansions()
local ie = instance.expansions
for k,v in next, ie do
- local t, h, p = { }, { }, split_kpse_path(v)
+ local t, h, p = { }, { }, split_configuration_path(v)
for kk=1,#p do
local vv = p[kk]
if vv ~= "" and not h[vv] then
@@ -9114,222 +10003,22 @@ end
-- end of split/join code
-function resolvers.saveoldconfig()
- resolvers.splitconfig()
- resolvers.save_data('configuration')
- resolvers.joinconfig()
-end
-
-resolvers.configbanner = [[
--- This is a Luatex configuration file created by 'luatools.lua' or
--- 'luatex.exe' directly. For comment, suggestions and questions you can
--- contact the ConTeXt Development Team. This configuration file is
--- not copyrighted. [HH & TH]
-]]
-
-function resolvers.serialize(files)
- -- This version is somewhat optimized for the kind of
- -- tables that we deal with, so it's much faster than
- -- the generic serializer. This makes sense because
- -- luatools and mtxtools are called frequently. Okay,
- -- we pay a small price for properly tabbed tables.
- local t = { }
- local function dump(k,v,m) -- could be moved inline
- if type(v) == 'string' then
- return m .. "['" .. k .. "']='" .. v .. "',"
- elseif #v == 1 then
- return m .. "['" .. k .. "']='" .. v[1] .. "',"
- else
- return m .. "['" .. k .. "']={'" .. concat(v,"','").. "'},"
- end
- end
- t[#t+1] = "return {"
- if instance.sortdata then
- local sortedfiles = sortedkeys(files)
- for i=1,#sortedfiles do
- local k = sortedfiles[i]
- local fk = files[k]
- if type(fk) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- local sortedfk = sortedkeys(fk)
- for j=1,#sortedfk do
- local kk = sortedfk[j]
- t[#t+1] = dump(kk,fk[kk],"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,fk,"\t")
- end
- end
- else
- for k, v in next, files do
- if type(v) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- for kk,vv in next, v do
- t[#t+1] = dump(kk,vv,"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,v,"\t")
- end
- end
- end
- t[#t+1] = "}"
- return concat(t,"\n")
-end
-
-local data_state = { }
+-- we used to have 'files' and 'configurations', hence the following
+-- shared function
function resolvers.data_state()
- return data_state or { }
-end
-
-function resolvers.save_data(dataname, makename) -- untested without cache overload
- for cachename, files in next, instance[dataname] do
- local name = (makename or file.join)(cachename,dataname)
- local luaname, lucname = name .. ".lua", name .. ".luc"
- if trace_locating then
- logs.report("fileio","preparing '%s' for '%s'",dataname,cachename)
- end
- for k, v in next, files do
- if type(v) == "table" and #v == 1 then
- files[k] = v[1]
- end
- end
- local data = {
- type = dataname,
- root = cachename,
- version = resolvers.cacheversion,
- date = os.date("%Y-%m-%d"),
- time = os.date("%H:%M:%S"),
- content = files,
- uuid = os.uuid(),
- }
- local ok = io.savedata(luaname,resolvers.serialize(data))
- if ok then
- if trace_locating then
- logs.report("fileio","'%s' saved in '%s'",dataname,luaname)
- end
- if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip
- if trace_locating then
- logs.report("fileio","'%s' compiled to '%s'",dataname,lucname)
- end
- else
- if trace_locating then
- logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname)
- end
- os.remove(lucname)
- end
- elseif trace_locating then
- logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname)
- end
- end
-end
-
-function resolvers.load_data(pathname,dataname,filename,makename) -- untested without cache overload
- filename = ((not filename or (filename == "")) and dataname) or filename
- filename = (makename and makename(dataname,filename)) or file.join(pathname,filename)
- local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
- if blob then
- local data = blob()
- if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then
- data_state[#data_state+1] = data.uuid
- if trace_locating then
- logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = data.content
- else
- if trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
-end
-
--- some day i'll use the nested approach, but not yet (actually we even drop
--- engine/progname support since we have only luatex now)
---
--- first texmfcnf.lua files are located, next the cached texmf.cnf files
---
--- return {
--- TEXMFBOGUS = 'effe checken of dit werkt',
--- }
-
-function resolvers.resetconfig()
- identify_own()
- instance.configuration, instance.setup, instance.order, instance.loaderror = { }, { }, { }, false
-end
-
-function resolvers.loadnewconfig()
- local luafiles = instance.luafiles
- for i=1,#luafiles do
- local cnf = luafiles[i]
- local pathname = file.dirname(cnf)
- local filename = file.join(pathname,resolvers.luaname)
- local blob = loadfile(filename)
- if blob then
- local data = blob()
- if data then
- if trace_locating then
- logs.report("fileio","loading configuration file '%s'",filename)
- end
- if true then
- -- flatten to variable.progname
- local t = { }
- for k, v in next, data do -- v = progname
- if type(v) == "string" then
- t[k] = v
- else
- for kk, vv in next, v do -- vv = variable
- if type(vv) == "string" then
- t[vv.."."..v] = kk
- end
- end
- end
- end
- instance['setup'][pathname] = t
- else
- instance['setup'][pathname] = data
- end
- else
- if trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance['setup'][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance.order[#instance.order+1] = instance.setup[pathname]
- if instance.loaderror then break end
- end
-end
-
-function resolvers.loadoldconfig()
- if not instance.renewcache then
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- local cnf = cnffiles[i]
- local dname = file.dirname(cnf)
- resolvers.load_data(dname,'configuration')
- instance.order[#instance.order+1] = instance.configuration[dname]
- if instance.loaderror then break end
- end
- end
- resolvers.joinconfig()
+ return caches.contentstate()
end
function resolvers.expand_variables()
local expansions, environment, variables = { }, instance.environment, instance.variables
- local env = resolvers.env
+ local getenv = resolvers.getenv
instance.expansions = expansions
- if instance.engine ~= "" then environment['engine'] = instance.engine end
- if instance.progname ~= "" then environment['progname'] = instance.progname end
+ local engine, progname = instance.engine, instance.progname
+ if type(engine) ~= "string" then instance.engine, engine = "", "" end
+ if type(progname) ~= "string" then instance.progname, progname = "", "" end
+ if engine ~= "" then environment['engine'] = engine end
+ if progname ~= "" then environment['progname'] = progname end
for k,v in next, environment do
local a, b = match(k,"^(%a+)%_(.*)%s*$")
if a and b then
@@ -9338,7 +10027,7 @@ function resolvers.expand_variables()
expansions[k] = v
end
end
- for k,v in next, environment do -- move environment to expansions
+ for k,v in next, environment do -- move environment to expansions (variables are already in there)
if not expansions[k] then expansions[k] = v end
end
for k,v in next, variables do -- move variables to expansions
@@ -9347,7 +10036,7 @@ function resolvers.expand_variables()
local busy = false
local function resolve(a)
busy = true
- return expansions[a] or env(a)
+ return expansions[a] or getenv(a)
end
while true do
busy = false
@@ -9355,6 +10044,8 @@ function resolvers.expand_variables()
local s, n = gsub(v,"%$([%a%d%_%-]+)",resolve)
local s, m = gsub(s,"%$%{([%a%d%_%-]+)%}",resolve)
if n > 0 or m > 0 then
+ s = gsub(s,";+",";")
+ s = gsub(s,";[!{}/\\]+;",";")
expansions[k]= s
end
end
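-- A self-contained sketch of the expansion loop above: $VAR and ${VAR}
-- references are resolved repeatedly until a full pass changes nothing, and
-- the newly added gsub calls collapse the runs of ';' that empty values leave
-- behind. The table content is made-up test data; the real code also consults
-- the environment via getenv.
local expansions = {
    TEXMF      = "{$TEXMFHOME;$TEXMFLOCAL}",
    TEXMFHOME  = "/home/user/texmf",
    TEXMFLOCAL = "",
}

local function resolve(a)
    return expansions[a] or ""
end

local busy = true
while busy do
    busy = false
    for k, v in next, expansions do
        local s, n = string.gsub(v, "%$([%a%d%_%-]+)", resolve)
        local s, m = string.gsub(s, "%$%{([%a%d%_%-]+)%}", resolve)
        if n > 0 or m > 0 then
            s = string.gsub(s, ";+", ";") -- collapse empty entries
            expansions[k] = s
            busy = true
        end
    end
end

-- expansions.TEXMF --> "{/home/user/texmf;}"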
@@ -9391,63 +10082,59 @@ function resolvers.unexpanded_path(str)
return file.join_path(resolvers.unexpanded_path_list(str))
end
-do -- no longer needed
-
- local done = { }
+local done = { }
- function resolvers.reset_extra_path()
- local ep = instance.extra_paths
- if not ep then
- ep, done = { }, { }
- instance.extra_paths = ep
- elseif #ep > 0 then
- instance.lists, done = { }, { }
- end
+function resolvers.reset_extra_path()
+ local ep = instance.extra_paths
+ if not ep then
+ ep, done = { }, { }
+ instance.extra_paths = ep
+ elseif #ep > 0 then
+ instance.lists, done = { }, { }
end
+end
- function resolvers.register_extra_path(paths,subpaths)
- local ep = instance.extra_paths or { }
- local n = #ep
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = p .. "/" .. s
- if not done[ps] then
- ep[#ep+1] = resolvers.clean_path(ps)
- done[ps] = true
- end
- end
- end
- else
- for p in gmatch(paths,"[^,]+") do
- if not done[p] then
- ep[#ep+1] = resolvers.clean_path(p)
- done[p] = true
- end
- end
- end
- elseif subpaths and subpaths ~= "" then
- for i=1,n do
+function resolvers.register_extra_path(paths,subpaths)
+ local ep = instance.extra_paths or { }
+ local n = #ep
+ if paths and paths ~= "" then
+ if subpaths and subpaths ~= "" then
+ for p in gmatch(paths,"[^,]+") do
-- we gmatch each step again, not that fast, but used seldom
for s in gmatch(subpaths,"[^,]+") do
- local ps = ep[i] .. "/" .. s
+ local ps = p .. "/" .. s
if not done[ps] then
ep[#ep+1] = resolvers.clean_path(ps)
done[ps] = true
end
end
end
+ else
+ for p in gmatch(paths,"[^,]+") do
+ if not done[p] then
+ ep[#ep+1] = resolvers.clean_path(p)
+ done[p] = true
+ end
+ end
end
- if #ep > 0 then
- instance.extra_paths = ep -- register paths
- end
- if #ep > n then
- instance.lists = { } -- erase the cache
+ elseif subpaths and subpaths ~= "" then
+ for i=1,n do
+ -- we gmatch each step again, not that fast, but used seldom
+ for s in gmatch(subpaths,"[^,]+") do
+ local ps = ep[i] .. "/" .. s
+ if not done[ps] then
+ ep[#ep+1] = resolvers.clean_path(ps)
+ done[ps] = true
+ end
+ end
end
end
-
+ if #ep > 0 then
+ instance.extra_paths = ep -- register paths
+ end
+ if #ep > n then
+ instance.lists = { } -- erase the cache
+ end
end
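-- An illustrative sketch of how register_extra_path above combines its comma
-- separated arguments: every path is paired with every subpath (duplicates
-- are filtered by the 'done' table in the real code). The helper name is made
-- up.
local function combine(paths, subpaths)
    local result = { }
    for p in string.gmatch(paths, "[^,]+") do
        for s in string.gmatch(subpaths, "[^,]+") do
            result[#result+1] = p .. "/" .. s
        end
    end
    return result
end

-- combine("/data/project,/data/extra", "images,styles")
-- --> { "/data/project/images", "/data/project/styles",
--       "/data/extra/images",  "/data/extra/styles" }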
local function made_list(instance,list)
@@ -9492,7 +10179,7 @@ function resolvers.clean_path_list(str)
local t = resolvers.expanded_path_list(str)
if t then
for i=1,#t do
- t[i] = file.collapse_path(resolvers.clean_path(t[i]))
+ t[i] = collapse_path(resolvers.clean_path(t[i]))
end
end
return t
@@ -9532,33 +10219,6 @@ function resolvers.expand_path_from_var(str)
return file.join_path(resolvers.expanded_path_list_from_var(str))
end
-function resolvers.format_of_var(str)
- return formats[str] or formats[alternatives[str]] or ''
-end
-function resolvers.format_of_suffix(str)
- return suffixmap[file.extname(str)] or 'tex'
-end
-
-function resolvers.variable_of_format(str)
- return formats[str] or formats[alternatives[str]] or ''
-end
-
-function resolvers.var_of_format_or_suffix(str)
- local v = formats[str]
- if v then
- return v
- end
- v = formats[alternatives[str]]
- if v then
- return v
- end
- v = suffixmap[file.extname(str)]
- if v then
- return formats[isf]
- end
- return ''
-end
-
function resolvers.expand_braces(str) -- output variable and brace expansion of STRING
local ori = resolvers.variable(str)
local pth = expanded_path_from_list(resolvers.split_path(ori))
@@ -9571,9 +10231,9 @@ function resolvers.isreadable.file(name)
local readable = lfs.isfile(name) -- brrr
if trace_detail then
if readable then
- logs.report("fileio","file '%s' is readable",name)
+ report_resolvers("file '%s' is readable",name)
else
- logs.report("fileio","file '%s' is not readable", name)
+ report_resolvers("file '%s' is not readable", name)
end
end
return readable
@@ -9589,10 +10249,10 @@ local function collect_files(names)
for k=1,#names do
local fname = names[k]
if trace_detail then
- logs.report("fileio","checking name '%s'",fname)
+ report_resolvers("checking name '%s'",fname)
end
- local bname = file.basename(fname)
- local dname = file.dirname(fname)
+ local bname = filebasename(fname)
+ local dname = filedirname(fname)
if dname == "" or find(dname,"^%.") then
dname = false
else
@@ -9605,7 +10265,7 @@ local function collect_files(names)
local files = blobpath and instance.files[blobpath]
if files then
if trace_detail then
- logs.report("fileio","deep checking '%s' (%s)",blobpath,bname)
+ report_resolvers("deep checking '%s' (%s)",blobpath,bname)
end
local blobfile = files[bname]
if not blobfile then
@@ -9617,53 +10277,38 @@ local function collect_files(names)
end
end
if blobfile then
+ local blobroot = files.__path__ or blobpath
if type(blobfile) == 'string' then
if not dname or find(blobfile,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,blobfile,bname), -- search
- resolvers.concatinators[hash.type](blobpath,blobfile,bname) -- result
- }
+ local kind = hash.type
+ local search = filejoin(blobpath,blobfile,bname)
+ local result = resolvers.concatinators[hash.type](blobroot,blobfile,bname)
+ if trace_detail then
+ report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
+ end
+ filelist[#filelist+1] = { kind, search, result }
end
else
for kk=1,#blobfile do
local vv = blobfile[kk]
if not dname or find(vv,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,vv,bname), -- search
- resolvers.concatinators[hash.type](blobpath,vv,bname) -- result
- }
+ local kind = hash.type
+ local search = filejoin(blobpath,vv,bname)
+ local result = resolvers.concatinators[hash.type](blobroot,vv,bname)
+ if trace_detail then
+ report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
+ end
+ filelist[#filelist+1] = { kind, search, result }
end
end
end
end
elseif trace_locating then
- logs.report("fileio","no match in '%s' (%s)",blobpath,bname)
+ report_resolvers("no match in '%s' (%s)",blobpath,bname)
end
end
end
- if #filelist > 0 then
- return filelist
- else
- return nil
- end
-end
-
-function resolvers.suffix_of_format(str)
- if suffixes[str] then
- return suffixes[str][1]
- else
- return ""
- end
-end
-
-function resolvers.suffixes_of_format(str)
- if suffixes[str] then
- return suffixes[str]
- else
- return {}
- end
+ return #filelist > 0 and filelist or nil
end
function resolvers.register_in_trees(name)
@@ -9683,27 +10328,28 @@ local function can_be_dir(name) -- can become local
fakepaths[name] = 2 -- no directory
end
end
- return (fakepaths[name] == 1)
+ return fakepaths[name] == 1
end
local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc)
local result = collected or { }
local stamp = nil
- filename = file.collapse_path(filename)
+ filename = collapse_path(filename)
-- speed up / beware: format problem
if instance.remember then
stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format
if instance.found[stamp] then
if trace_locating then
- logs.report("fileio","remembering file '%s'",filename)
+ report_resolvers("remembering file '%s'",filename)
end
+ resolvers.register_in_trees(filename) -- for tracing used files
return instance.found[stamp]
end
end
if not dangerous[instance.format or "?"] then
if resolvers.isreadable.file(filename) then
if trace_detail then
- logs.report("fileio","file '%s' found directly",filename)
+ report_resolvers("file '%s' found directly",filename)
end
instance.found[stamp] = { filename }
return { filename }
@@ -9711,36 +10357,39 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
end
if find(filename,'%*') then
if trace_locating then
- logs.report("fileio","checking wildcard '%s'", filename)
+ report_resolvers("checking wildcard '%s'", filename)
end
result = resolvers.find_wildcard_files(filename)
elseif file.is_qualified_path(filename) then
if resolvers.isreadable.file(filename) then
if trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
+ report_resolvers("qualified name '%s'", filename)
end
result = { filename }
else
- local forcedname, ok, suffix = "", false, file.extname(filename)
+ local forcedname, ok, suffix = "", false, fileextname(filename)
if suffix == "" then -- why
if instance.format == "" then
forcedname = filename .. ".tex"
if resolvers.isreadable.file(forcedname) then
if trace_locating then
- logs.report("fileio","no suffix, forcing standard filetype 'tex'")
+ report_resolvers("no suffix, forcing standard filetype 'tex'")
end
result, ok = { forcedname }, true
end
else
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
- forcedname = filename .. "." .. s
- if resolvers.isreadable.file(forcedname) then
- if trace_locating then
- logs.report("fileio","no suffix, forcing format filetype '%s'", s)
+ local format_suffixes = suffixes[instance.format]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if resolvers.isreadable.file(forcedname) then
+ if trace_locating then
+ report_resolvers("no suffix, forcing format filetype '%s'", s)
+ end
+ result, ok = { forcedname }, true
+ break
end
- result, ok = { forcedname }, true
- break
end
end
end
@@ -9748,7 +10397,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if not ok and suffix ~= "" then
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
- local basename = file.basename(filename)
+ local basename = filebasename(filename)
local pattern = gsub(filename .. "$","([%.%-])","%%%1")
local savedformat = instance.format
local format = savedformat or ""
@@ -9789,12 +10438,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
-- end
end
if not ok and trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
+ report_resolvers("qualified name '%s'", filename)
end
end
else
-- search spec
- local filetype, extra, done, wantedfiles, ext = '', nil, false, { }, file.extname(filename)
+ local filetype, extra, done, wantedfiles, ext = '', nil, false, { }, fileextname(filename)
+ -- tricky as filename can be bla.1.2.3
if ext == "" then
if not instance.force_suffixes then
wantedfiles[#wantedfiles+1] = filename
@@ -9803,29 +10453,31 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
wantedfiles[#wantedfiles+1] = filename
end
if instance.format == "" then
- if ext == "" then
+ if ext == "" or not suffixmap[ext] then
local forcedname = filename .. '.tex'
wantedfiles[#wantedfiles+1] = forcedname
filetype = resolvers.format_of_suffix(forcedname)
if trace_locating then
- logs.report("fileio","forcing filetype '%s'",filetype)
+ report_resolvers("forcing filetype '%s'",filetype)
end
else
filetype = resolvers.format_of_suffix(filename)
if trace_locating then
- logs.report("fileio","using suffix based filetype '%s'",filetype)
+ report_resolvers("using suffix based filetype '%s'",filetype)
end
end
else
- if ext == "" then
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. s
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[instance.format]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
end
filetype = instance.format
if trace_locating then
- logs.report("fileio","using given filetype '%s'",filetype)
+ report_resolvers("using given filetype '%s'",filetype)
end
end
local typespec = resolvers.variable_of_format(filetype)
@@ -9833,13 +10485,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if not pathlist or #pathlist == 0 then
-- no pathlist, access check only / todo == wildcard
if trace_detail then
- logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ report_resolvers("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
end
for k=1,#wantedfiles do
local fname = wantedfiles[k]
if fname and resolvers.isreadable.file(fname) then
filename, done = fname, true
- result[#result+1] = file.join('.',fname)
+ result[#result+1] = filejoin('.',fname)
break
end
end
@@ -9857,11 +10509,11 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
local dirlist = { }
if filelist then
for i=1,#filelist do
- dirlist[i] = file.dirname(filelist[i][2]) .. "/"
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
end
if trace_detail then
- logs.report("fileio","checking filename '%s'",filename)
+ report_resolvers("checking filename '%s'",filename)
end
-- a bit messy ... esp the doscan setting here
local doscan
@@ -9884,7 +10536,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
expression = "^" .. expression .. "$"
if trace_detail then
- logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname)
+ report_resolvers("using pattern '%s' for path '%s'",expression,pathname)
end
for k=1,#filelist do
local fl = filelist[k]
@@ -9893,20 +10545,19 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if find(d,expression) then
--- todo, test for readable
result[#result+1] = fl[3]
- resolvers.register_in_trees(f) -- for tracing used files
done = true
if instance.allresults then
if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d)
+ report_resolvers("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
else
if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d)
+ report_resolvers("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
end
break
end
elseif trace_detail then
- logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d)
+ report_resolvers("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
end
@@ -9919,10 +10570,10 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if can_be_dir(ppname) then
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local fname = file.join(ppname,w)
+ local fname = filejoin(ppname,w)
if resolvers.isreadable.file(fname) then
if trace_detail then
- logs.report("fileio","found '%s' by scanning",fname)
+ report_resolvers("found '%s' by scanning",fname)
end
result[#result+1] = fname
done = true
@@ -9936,14 +10587,16 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
end
end
if not done and doscan then
- -- todo: slow path scanning
+ -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
end
if done and not instance.allresults then break end
end
end
end
for k=1,#result do
- result[k] = file.collapse_path(result[k])
+ local rk = collapse_path(result[k])
+ result[k] = rk
+ resolvers.register_in_trees(rk) -- for tracing used files
end
if instance.remember then
instance.found[stamp] = result
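-- A minimal sketch of the memoization used in collect_instance_files above:
-- results are remembered per filename/engine/progname/format combination, so
-- a repeated request is answered from the 'found' table instead of being
-- resolved again. The 'finder' argument is a hypothetical stand-in for the
-- real lookup.
local found = { }

local function remembered_find(filename, engine, progname, format, finder)
    local stamp = filename .. "--" .. engine .. "--" .. progname .. "--" .. format
    local result = found[stamp]
    if not result then
        result = finder(filename) -- the expensive lookup
        found[stamp] = result
    end
    return result
end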
@@ -9953,7 +10606,7 @@ end
if not resolvers.concatinators then resolvers.concatinators = { } end
-resolvers.concatinators.tex = file.join
+resolvers.concatinators.tex = filejoin
resolvers.concatinators.file = resolvers.concatinators.tex
function resolvers.find_files(filename,filetype,mustexist)
@@ -9980,8 +10633,14 @@ function resolvers.find_file(filename,filetype,mustexist)
return (resolvers.find_files(filename,filetype,mustexist)[1] or "")
end
+function resolvers.find_path(filename,filetype)
+ local path = resolvers.find_files(filename,filetype)[1] or ""
+ -- todo return current path
+ return file.dirname(path)
+end
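-- A hedged usage sketch for the new resolvers.find_path above: it reuses
-- find_files and keeps only the directory part of the first match. The file
-- name is just an example.
-- local path = resolvers.find_path("context.mkiv") -- directory of the first match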
+
function resolvers.find_given_files(filename)
- local bname, result = file.basename(filename), { }
+ local bname, result = filebasename(filename), { }
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
@@ -10038,9 +10697,9 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
return done
end
-function resolvers.find_wildcard_files(filename) -- todo: remap:
+function resolvers.find_wildcard_files(filename) -- todo: remap: and lpeg
local result = { }
- local bname, dname = file.basename(filename), file.dirname(filename)
+ local bname, dname = filebasename(filename), filedirname(filename)
local path = gsub(dname,"^*/","")
path = gsub(path,"*",".*")
path = gsub(path,"-","%%-")
@@ -10093,24 +10752,24 @@ end
function resolvers.load(option)
statistics.starttiming(instance)
- resolvers.resetconfig()
- resolvers.identify_cnf()
- resolvers.load_lua() -- will become the new method
- resolvers.expand_variables()
- resolvers.load_cnf() -- will be skipped when we have a lua file
+ identify_configuration_files()
+ load_configuration_files()
+ collapse_configuration_data()
resolvers.expand_variables()
if option ~= "nofiles" then
- resolvers.load_hash()
+ load_databases()
resolvers.automount()
end
statistics.stoptiming(instance)
+ local files = instance.files
+ return files and next(files) and true
end
function resolvers.for_files(command, files, filetype, mustexist)
if files and #files > 0 then
local function report(str)
if trace_locating then
- logs.report("fileio",str) -- has already verbose
+ report_resolvers(str) -- has already verbose
else
print(str)
end
@@ -10158,51 +10817,6 @@ function resolvers.register_file(files, name, path)
end
end
-function resolvers.splitmethod(filename)
- if not filename then
- return { } -- safeguard
- elseif type(filename) == "table" then
- return filename -- already split
- elseif not find(filename,"://") then
- return { scheme="file", path = filename, original=filename } -- quick hack
- else
- return url.hashed(filename)
- end
-end
-
-function table.sequenced(t,sep) -- temp here
- local s = { }
- for k, v in next, t do -- indexed?
- s[#s+1] = k .. "=" .. tostring(v)
- end
- return concat(s, sep or " | ")
-end
-
-function resolvers.methodhandler(what, filename, filetype) -- ...
- filename = file.collapse_path(filename)
- local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
- local scheme = specification.scheme
- if resolvers[what][scheme] then
- if trace_locating then
- logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
- end
- return resolvers[what][scheme](filename,filetype) -- todo: specification
- else
- return resolvers[what].tex(filename,filetype) -- todo: specification
- end
-end
-
-function resolvers.clean_path(str)
- if str then
- str = gsub(str,"\\","/")
- str = gsub(str,"^!+","")
- str = gsub(str,"^~",resolvers.homedir)
- return str
- else
- return nil
- end
-end
-
function resolvers.do_with_path(name,func)
local pathlist = resolvers.expanded_path_list(name)
for i=1,#pathlist do
@@ -10214,45 +10828,13 @@ function resolvers.do_with_var(name,func)
func(expanded_var(name))
end
-function resolvers.with_files(pattern,handle)
- local hashes = instance.hashes
- for i=1,#hashes do
- local hash = hashes[i]
- local blobpath = hash.tag
- local blobtype = hash.type
- if blobpath then
- local files = instance.files[blobpath]
- if files then
- for k,v in next, files do
- if find(k,"^remap:") then
- k = files[k]
- v = files[k] -- chained
- end
- if find(k,pattern) then
- if type(v) == "string" then
- handle(blobtype,blobpath,v,k)
- else
- for _,vv in next, v do -- indexed
- handle(blobtype,blobpath,vv,k)
- end
- end
- end
- end
- end
- end
- end
-end
-
function resolvers.locate_format(name)
- local barename, fmtname = gsub(name,"%.%a+$",""), ""
- if resolvers.usecache then
- local path = file.join(caches.setpath("formats")) -- maybe platform
- fmtname = file.join(path,barename..".fmt") or ""
- end
+ local barename = gsub(name,"%.%a+$","")
+ local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
if fmtname == "" then
fmtname = resolvers.find_files(barename..".fmt")[1] or ""
+ fmtname = resolvers.clean_path(fmtname)
end
- fmtname = resolvers.clean_path(fmtname)
if fmtname ~= "" then
local barename = file.removesuffix(fmtname)
local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
@@ -10277,196 +10859,48 @@ function resolvers.boolean_variable(str,default)
end
end
-texconfig.kpse_init = false
-
-kpse = { original = kpse } setmetatable(kpse, { __index = function(k,v) return resolvers[v] end } )
-
--- for a while
-
-input = resolvers
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-tmp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>This module deals with caching data. It sets up the paths and
-implements loaders and savers for tables. Best is to set the
-following variable. When not set, the usual paths will be
-checked. Personally I prefer the (user's) temporary path.</p>
-
-<code>
-TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
-</code>
-
-<p>Currently we do no locking when we write files. This is no real
-problem because most caching involves fonts and the chance of them
-being written at the same time is small. We also need to extend
-luatools with a recache feature.</p>
---ldx]]--
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet
-
-caches = caches or { }
-
-caches.path = caches.path or nil
-caches.base = caches.base or "luatex-cache"
-caches.more = caches.more or "context"
-caches.direct = false -- true is faster but may need huge amounts of memory
-caches.tree = false
-caches.paths = caches.paths or nil
-caches.force = false
-caches.defaults = { "TEXMFCACHE", "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
-
-function caches.temp()
- local cachepath = nil
- local function check(list,isenv)
- if not cachepath then
- for k=1,#list do
- local v = list[k]
- cachepath = (isenv and (os.env[v] or "")) or v or ""
- if cachepath == "" then
- -- next
- else
- cachepath = resolvers.clean_path(cachepath)
- if lfs.isdir(cachepath) and file.iswritable(cachepath) then -- lfs.attributes(cachepath,"mode") == "directory"
- break
- elseif caches.force or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
- dir.mkdirs(cachepath)
- if lfs.isdir(cachepath) and file.iswritable(cachepath) then
- break
+function resolvers.with_files(pattern,handle,before,after) -- can be a nice iterator instead
+ local instance = resolvers.instance
+ local hashes = instance.hashes
+ for i=1,#hashes do
+ local hash = hashes[i]
+ local blobtype = hash.type
+ local blobpath = hash.tag
+ if blobpath then
+ if before then
+ before(blobtype,blobpath,pattern)
+ end
+ local files = instance.files[blobpath]
+ local total, checked, done = 0, 0, 0
+ if files then
+ for k,v in next, files do
+ total = total + 1
+ if find(k,"^remap:") then
+ k = files[k]
+ v = k -- files[k] -- chained
+ end
+ if find(k,pattern) then
+ if type(v) == "string" then
+ checked = checked + 1
+ if handle(blobtype,blobpath,v,k) then
+ done = done + 1
+ end
+ else
+ checked = checked + #v
+ for i=1,#v do
+ if handle(blobtype,blobpath,v[i],k) then
+ done = done + 1
+ end
+ end
end
end
end
- cachepath = nil
+ end
+ if after then
+ after(blobtype,blobpath,pattern,total,checked,done)
end
end
end
- check(resolvers.clean_path_list("TEXMFCACHE") or { })
- check(caches.defaults,true)
- if not cachepath then
- print("\nfatal error: there is no valid (writable) cache path defined\n")
- os.exit()
- elseif not lfs.isdir(cachepath) then -- lfs.attributes(cachepath,"mode") ~= "directory"
- print(format("\nfatal error: cache path %s is not a directory\n",cachepath))
- os.exit()
- end
- cachepath = file.collapse_path(cachepath)
- function caches.temp()
- return cachepath
- end
- return cachepath
-end
-
-function caches.configpath()
- return table.concat(resolvers.instance.cnffiles,";")
-end
-
-function caches.hashed(tree)
- return md5.hex(gsub(lower(tree),"[\\\/]+","/"))
-end
-
-function caches.treehash()
- local tree = caches.configpath()
- if not tree or tree == "" then
- return false
- else
- return caches.hashed(tree)
- end
-end
-
-function caches.setpath(...)
- if not caches.path then
- if not caches.path then
- caches.path = caches.temp()
- end
- caches.path = resolvers.clean_path(caches.path) -- to be sure
- caches.tree = caches.tree or caches.treehash()
- if caches.tree then
- caches.path = dir.mkdirs(caches.path,caches.base,caches.more,caches.tree)
- else
- caches.path = dir.mkdirs(caches.path,caches.base,caches.more)
- end
- end
- if not caches.path then
- caches.path = '.'
- end
- caches.path = resolvers.clean_path(caches.path)
- local dirs = { ... }
- if #dirs > 0 then
- local pth = dir.mkdirs(caches.path,...)
- return pth
- end
- caches.path = dir.expand_name(caches.path)
- return caches.path
-end
-
-function caches.definepath(category,subcategory)
- return function()
- return caches.setpath(category,subcategory)
- end
-end
-
-function caches.setluanames(path,name)
- return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
-end
-
-function caches.loaddata(path,name)
- local tmaname, tmcname = caches.setluanames(path,name)
- local loader = loadfile(tmcname) or loadfile(tmaname)
- if loader then
- loader = loader()
- collectgarbage("step")
- return loader
- else
- return false
- end
-end
-
---~ function caches.loaddata(path,name)
---~ local tmaname, tmcname = caches.setluanames(path,name)
---~ return dofile(tmcname) or dofile(tmaname)
---~ end
-
-function caches.iswritable(filepath,filename)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- return file.iswritable(tmaname)
-end
-
-function caches.savedata(filepath,filename,data,raw)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- local reduce, simplify = true, true
- if raw then
- reduce, simplify = false, false
- end
- data.cache_uuid = os.uuid()
- if caches.direct then
- file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex
- else
- table.tofile(tmaname, data,'return',false,true,false) -- maybe not the last true
- end
- local cleanup = resolvers.boolean_variable("PURGECACHE", false)
- local strip = resolvers.boolean_variable("LUACSTRIP", true)
- utils.lua.compile(tmaname, tmcname, cleanup, strip)
-end
-
--- here we use the cache for format loading (texconfig.[formatname|jobname])
-
---~ if tex and texconfig and texconfig.formatname and texconfig.formatname == "" then
-if tex and texconfig and (not texconfig.formatname or texconfig.formatname == "") and input and resolvers.instance then
- if not texconfig.luaname then texconfig.luaname = "cont-en.lua" end -- or luc
- texconfig.formatname = caches.setpath("formats") .. "/" .. gsub(texconfig.luaname,"%.lu.$",".fmt")
end
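-- A hedged usage sketch for the extended resolvers.with_files above: the
-- handler is called for every registered file whose name matches the pattern,
-- a truthy return value counts as 'done', and the optional before/after
-- callbacks receive per-tree totals. The pattern and prints are only an
-- illustration.
-- resolvers.with_files("%.sty$",
--     function(blobtype, blobpath, storedpath, name) -- handle
--         print(blobtype, blobpath, storedpath, name)
--         return true
--     end,
--     nil, -- before(blobtype, blobpath, pattern)
--     function(blobtype, blobpath, pattern, total, checked, done) -- after
--         print(blobpath, total, checked, done)
--     end
-- )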
@@ -10474,7 +10908,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-res'] = {
+if not modules then modules = { } end modules ['data-pre'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -10482,14 +10916,15 @@ if not modules then modules = { } end modules ['data-res'] = {
license = "see context related readme files"
}
---~ print(resolvers.resolve("abc env:tmp file:cont-en.tex path:cont-en.tex full:cont-en.tex rel:zapf/one/p-chars.tex"))
local upper, lower, gsub = string.upper, string.lower, string.gsub
local prefixes = { }
-prefixes.environment = function(str)
- return resolvers.clean_path(os.getenv(str) or os.getenv(upper(str)) or os.getenv(lower(str)) or "")
+local getenv = resolvers.getenv
+
+prefixes.environment = function(str) -- getenv is case insensitive anyway
+ return resolvers.clean_path(getenv(str) or getenv(upper(str)) or getenv(lower(str)) or "")
end
prefixes.relative = function(str,n)
@@ -10627,7 +11062,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
if not modules then modules = { } end modules ['data-con'] = {
- version = 1.001,
+ version = 1.100,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
@@ -10657,46 +11092,58 @@ containers = containers or { }
containers.usecache = true
+local report_cache = logs.new("cache")
+
local function report(container,tag,name)
if trace_cache or trace_containers then
- logs.report(format("%s cache",container.subcategory),"%s: %s",tag,name or 'invalid')
+ report_cache("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
end
end
local allocated = { }
--- tracing
+local mt = {
+ __index = function(t,k)
+ if k == "writable" then
+ local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable = writable
+ return writable
+ elseif k == "readables" then
+ local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables = readables
+ return readables
+ end
+ end
+}
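-- A self-contained sketch of the lazy __index trick used above: the path is
-- computed on first access and then stored on the table itself, so the
-- metatable only fires once per field. The 'compute_writable_path' helper is
-- a made-up stand-in for caches.getwritablepath.
local function compute_writable_path(category, subcategory)
    return "/tmp/luatex-cache/" .. category .. "/" .. subcategory
end

local lazy = {
    __index = function(t, k)
        if k == "writable" then
            local v = compute_writable_path(t.category, t.subcategory)
            t.writable = v -- cache on the table: later accesses are plain lookups
            return v
        end
    end
}

local s = setmetatable({ category = "fonts", subcategory = "otf" }, lazy)
print(s.writable) -- computed on first access, an ordinary field afterwards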
function containers.define(category, subcategory, version, enabled)
- return function()
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or 1.000,
- trace = false,
- path = caches and caches.setpath and caches.setpath(category,subcategory),
- }
- c[subcategory] = s
- end
- return s
- else
- return nil
+ if category and subcategory then
+ local c = allocated[category]
+ if not c then
+ c = { }
+ allocated[category] = c
+ end
+ local s = c[subcategory]
+ if not s then
+ s = {
+ category = category,
+ subcategory = subcategory,
+ storage = { },
+ enabled = enabled,
+ version = version or math.pi, -- after all, this is TeX
+ trace = false,
+ -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
+ -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
+ }
+ setmetatable(s,mt)
+ c[subcategory] = s
end
+ return s
end
end
function containers.is_usable(container, name)
- return container.enabled and caches and caches.iswritable(container.path, name)
+ return container.enabled and caches and caches.iswritable(container.writable, name)
end
function containers.is_valid(container, name)
@@ -10709,18 +11156,20 @@ function containers.is_valid(container, name)
end
function containers.read(container,name)
- if container.enabled and caches and not container.storage[name] and containers.usecache then
- container.storage[name] = caches.loaddata(container.path,name)
- if containers.is_valid(container,name) then
+ local storage = container.storage
+ local stored = storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored = caches.loaddata(container.readables,name)
+ if stored and stored.cache_version == container.version then
report(container,"loaded",name)
else
- container.storage[name] = nil
+ stored = nil
end
- end
- if container.storage[name] then
+ storage[name] = stored
+ elseif stored then
report(container,"reusing",name)
end
- return container.storage[name]
+ return stored
end
function containers.write(container, name, data)
@@ -10729,7 +11178,7 @@ function containers.write(container, name, data)
if container.enabled and caches then
local unique, shared = data.unique, data.shared
data.unique, data.shared = nil, nil
- caches.savedata(container.path, name, data)
+ caches.savedata(container.writable, name, data)
report(container,"saved",name)
data.unique, data.shared = unique, shared
end
@@ -10764,41 +11213,7 @@ local format, lower, gsub, find = string.format, string.lower, string.gsub, stri
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
--- since we want to use the cache instead of the tree, we will now
--- reimplement the saver.
-
-local save_data = resolvers.save_data
-local load_data = resolvers.load_data
-
-resolvers.cachepath = nil -- public, for tracing
-resolvers.usecache = true -- public, for tracing
-
-function resolvers.save_data(dataname)
- save_data(dataname, function(cachename,dataname)
- resolvers.usecache = not toboolean(resolvers.expansion("CACHEINTDS") or "false",true)
- if resolvers.usecache then
- resolvers.cachepath = resolvers.cachepath or caches.definepath("trees")
- return file.join(resolvers.cachepath(),caches.hashed(cachename))
- else
- return file.join(cachename,dataname)
- end
- end)
-end
-
-function resolvers.load_data(pathname,dataname,filename)
- load_data(pathname,dataname,filename,function(dataname,filename)
- resolvers.usecache = not toboolean(resolvers.expansion("CACHEINTDS") or "false",true)
- if resolvers.usecache then
- resolvers.cachepath = resolvers.cachepath or caches.definepath("trees")
- return file.join(resolvers.cachepath(),caches.hashed(pathname))
- else
- if not filename or (filename == "") then
- filename = dataname
- end
- return file.join(pathname,filename)
- end
- end)
-end
+local report_resolvers = logs.new("resolvers")
-- we will make a better format, maybe something xml or just text or lua
@@ -10807,7 +11222,7 @@ resolvers.automounted = resolvers.automounted or { }
function resolvers.automount(usecache)
local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT'))
if (not mountpaths or #mountpaths == 0) and usecache then
- mountpaths = { caches.setpath("mount") }
+ mountpaths = caches.getreadablepaths("mount")
end
if mountpaths and #mountpaths > 0 then
statistics.starttiming(resolvers.instance)
@@ -10821,7 +11236,7 @@ function resolvers.automount(usecache)
-- skip
elseif find(line,"^zip://") then
if trace_locating then
- logs.report("fileio","mounting %s",line)
+ report_resolvers("mounting %s",line)
end
table.insert(resolvers.automounted,line)
resolvers.usezipfile(line)
@@ -10837,8 +11252,8 @@ end
-- status info
-statistics.register("used config path", function() return caches.configpath() end)
-statistics.register("used cache path", function() return caches.temp() or "?" end)
+statistics.register("used config file", function() return caches.configfiles() end)
+statistics.register("used cache path", function() return caches.usedpaths() end)
-- experiment (code will move)
@@ -10866,11 +11281,11 @@ function statistics.check_fmt_status(texname)
local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
local luvbanner = luv.enginebanner or "?"
if luvbanner ~= enginebanner then
- return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
+ return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
end
local luvhash = luv.sourcehash or "?"
if luvhash ~= sourcehash then
- return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
+ return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
end
else
return "invalid status file"
@@ -10900,6 +11315,8 @@ local unpack = unpack or table.unpack
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
-- zip:///oeps.zip?name=bla/bla.tex
-- zip:///oeps.zip?tree=tex/texmf-local
-- zip:///texmf.zip?tree=/tex/texmf
@@ -10950,16 +11367,16 @@ function locators.zip(specification) -- where is this used? startup zips (untest
local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree
if trace_locating then
if zfile then
- logs.report("fileio","zip locator, archive '%s' found",specification.original)
+ report_resolvers("zip locator, archive '%s' found",specification.original)
else
- logs.report("fileio","zip locator, archive '%s' not found",specification.original)
+ report_resolvers("zip locator, archive '%s' not found",specification.original)
end
end
end
function hashers.zip(tag,name)
if trace_locating then
- logs.report("fileio","loading zip file '%s' as '%s'",name,tag)
+ report_resolvers("loading zip file '%s' as '%s'",name,tag)
end
resolvers.usezipfile(format("%s?tree=%s",tag,name))
end
@@ -10984,25 +11401,25 @@ function finders.zip(specification,filetype)
local zfile = zip.openarchive(specification.path)
if zfile then
if trace_locating then
- logs.report("fileio","zip finder, archive '%s' found",specification.path)
+ report_resolvers("zip finder, archive '%s' found",specification.path)
end
local dfile = zfile:open(q.name)
if dfile then
dfile = zfile:close()
if trace_locating then
- logs.report("fileio","zip finder, file '%s' found",q.name)
+ report_resolvers("zip finder, file '%s' found",q.name)
end
return specification.original
elseif trace_locating then
- logs.report("fileio","zip finder, file '%s' not found",q.name)
+ report_resolvers("zip finder, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio","zip finder, unknown archive '%s'",specification.path)
+ report_resolvers("zip finder, unknown archive '%s'",specification.path)
end
end
end
if trace_locating then
- logs.report("fileio","zip finder, '%s' not found",filename)
+ report_resolvers("zip finder, '%s' not found",filename)
end
return unpack(finders.notfound)
end
@@ -11015,25 +11432,25 @@ function openers.zip(specification)
local zfile = zip.openarchive(zipspecification.path)
if zfile then
if trace_locating then
- logs.report("fileio","zip opener, archive '%s' opened",zipspecification.path)
+ report_resolvers("zip opener, archive '%s' opened",zipspecification.path)
end
local dfile = zfile:open(q.name)
if dfile then
logs.show_open(specification)
if trace_locating then
- logs.report("fileio","zip opener, file '%s' found",q.name)
+ report_resolvers("zip opener, file '%s' found",q.name)
end
return openers.text_opener(specification,dfile,'zip')
elseif trace_locating then
- logs.report("fileio","zip opener, file '%s' not found",q.name)
+ report_resolvers("zip opener, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio","zip opener, unknown archive '%s'",zipspecification.path)
+ report_resolvers("zip opener, unknown archive '%s'",zipspecification.path)
end
end
end
if trace_locating then
- logs.report("fileio","zip opener, '%s' not found",filename)
+ report_resolvers("zip opener, '%s' not found",filename)
end
return unpack(openers.notfound)
end
@@ -11046,27 +11463,27 @@ function loaders.zip(specification)
local zfile = zip.openarchive(specification.path)
if zfile then
if trace_locating then
- logs.report("fileio","zip loader, archive '%s' opened",specification.path)
+ report_resolvers("zip loader, archive '%s' opened",specification.path)
end
local dfile = zfile:open(q.name)
if dfile then
logs.show_load(filename)
if trace_locating then
- logs.report("fileio","zip loader, file '%s' loaded",filename)
+ report_resolvers("zip loader, file '%s' loaded",filename)
end
local s = dfile:read("*all")
dfile:close()
return true, s, #s
elseif trace_locating then
- logs.report("fileio","zip loader, file '%s' not found",q.name)
+ report_resolvers("zip loader, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio","zip loader, unknown archive '%s'",specification.path)
+ report_resolvers("zip loader, unknown archive '%s'",specification.path)
end
end
end
if trace_locating then
- logs.report("fileio","zip loader, '%s' not found",filename)
+ report_resolvers("zip loader, '%s' not found",filename)
end
return unpack(openers.notfound)
end
@@ -11084,7 +11501,7 @@ function resolvers.usezipfile(zipname)
if z then
local instance = resolvers.instance
if trace_locating then
- logs.report("fileio","zip registering, registering archive '%s'",zipname)
+ report_resolvers("zip registering, registering archive '%s'",zipname)
end
statistics.starttiming(instance)
resolvers.prepend_hash('zip',zipname,zipfile)
@@ -11093,10 +11510,10 @@ function resolvers.usezipfile(zipname)
instance.files[zipname] = resolvers.register_zip_file(z,tree or "")
statistics.stoptiming(instance)
elseif trace_locating then
- logs.report("fileio","zip registering, unknown archive '%s'",zipname)
+ report_resolvers("zip registering, unknown archive '%s'",zipname)
end
elseif trace_locating then
- logs.report("fileio","zip registering, '%s' not found",zipname)
+ report_resolvers("zip registering, '%s' not found",zipname)
end
end
@@ -11108,7 +11525,7 @@ function resolvers.register_zip_file(z,tree)
filter = format("^%s/(.+)/(.-)$",tree)
end
if trace_locating then
- logs.report("fileio","zip registering, using filter '%s'",filter)
+ report_resolvers("zip registering, using filter '%s'",filter)
end
local register, n = resolvers.register_file, 0
for i in z:files() do
@@ -11125,7 +11542,7 @@ function resolvers.register_zip_file(z,tree)
n = n + 1
end
end
- logs.report("fileio","zip registering, %s files registered",n)
+ report_resolvers("zip registering, %s files registered",n)
return files
end
@@ -11134,6 +11551,93 @@ end -- of closure
do -- create closure to overcome 200 locals limit
+if not modules then modules = { } end modules ['data-tre'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- \input tree://oeps1/**/oeps.tex
+
+local find, gsub, format = string.find, string.gsub, string.format
+local unpack = unpack or table.unpack
+
+local report_resolvers = logs.new("resolvers")
+
+local done, found, notfound = { }, { }, resolvers.finders.notfound
+
+function resolvers.finders.tree(specification,filetype)
+ local fnd = found[specification]
+ if not fnd then
+ local spec = resolvers.splitmethod(specification).path or ""
+ if spec ~= "" then
+ local path, name = file.dirname(spec), file.basename(spec)
+ if path == "" then path = "." end
+ local hash = done[path]
+ if not hash then
+ local pattern = path .. "/*" -- we will use the proper splitter
+ hash = dir.glob(pattern)
+ done[path] = hash
+ end
+ local pattern = "/" .. gsub(name,"([%.%-%+])", "%%%1") .. "$"
+ for k=1,#hash do
+ local v = hash[k]
+ if find(v,pattern) then
+ found[specification] = v
+ return v
+ end
+ end
+ end
+ fnd = unpack(notfound) -- unpack ? why not just notfound[1]
+ found[specification] = fnd
+ end
+ return fnd
+end
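+
+-- Editorial sketch, not part of the patch: the tree:// path below is made up and
+-- assumes resolvers.splitmethod returns a table with a .path field for this scheme.
+--
+--~ local full = resolvers.finders.tree("tree:///home/user/project/**/oeps.tex")
+--~ -- the first call globs the directory part once and caches it in 'done';
+--~ -- repeated lookups of the same specification are served from 'found'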
+
+function resolvers.locators.tree(specification)
+ local spec = resolvers.splitmethod(specification)
+ local path = spec.path
+ if path ~= '' and lfs.isdir(path) then
+ if trace_locating then
+ report_resolvers("tree locator '%s' found (%s)",path,specification)
+ end
+ resolvers.append_hash('tree',specification,path,false) -- don't cache
+ elseif trace_locating then
+ report_resolvers("tree locator '%s' not found",path)
+ end
+end
+
+function resolvers.hashers.tree(tag,name)
+ if trace_locating then
+ report_resolvers("analysing tree '%s' as '%s'",name,tag)
+ end
+ -- todo: maybe share with done above
+ local spec = resolvers.splitmethod(tag)
+ local path = spec.path
+ resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
+end
+
+function resolvers.generators.tree(tag)
+ local spec = resolvers.splitmethod(tag)
+ local path = spec.path
+ resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
+end
+
+function resolvers.concatinators.tree(tag,path,name)
+ return file.join(tag,path,name)
+end
+
+resolvers.isreadable.tree = file.isreadable
+resolvers.openers.tree = resolvers.openers.generic
+resolvers.loaders.tree = resolvers.loaders.generic
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
if not modules then modules = { } end modules ['data-crl'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -11142,32 +11646,31 @@ if not modules then modules = { } end modules ['data-crl'] = {
license = "see context related readme files"
}
-local gsub = string.gsub
+-- this one is replaced by data-sch.lua --
curl = curl or { }
-curl.cached = { }
-curl.cachepath = caches.definepath("curl")
-
+local gsub = string.gsub
local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-function curl.fetch(protocol, name)
- local cachename = curl.cachepath() .. "/" .. gsub(name,"[^%a%d%.]+","-")
--- cachename = gsub(cachename,"[\\/]", io.fileseparator)
- cachename = gsub(cachename,"[\\]", "/") -- cleanup
- if not curl.cached[name] then
+local cached = { }
+
+function curl.fetch(protocol, name) -- todo: use socket library
+ local cleanname = gsub(name,"[^%a%d%.]+","-")
+ local cachename = caches.setfirstwritablefile(cleanname,"curl")
+ if not cached[name] then
if not io.exists(cachename) then
- curl.cached[name] = cachename
+ cached[name] = cachename
local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name -- no protocol .. "://"
os.spawn(command)
end
if io.exists(cachename) then
- curl.cached[name] = cachename
+ cached[name] = cachename
else
- curl.cached[name] = ""
+ cached[name] = ""
end
end
- return curl.cached[name]
+ return cached[name]
end
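
-- Editorial sketch, not part of the patch; the url is made up. curl.fetch shells
-- out to the curl binary once per resource and caches the result in the "curl"
-- cache tree:
--
--~ local cachename = curl.fetch("http","http://example.com/somefile.tex")
--~ -- first call spawns: curl --silent --create-dirs --output <cachename> <url>
--~ -- later calls return the cached name without spawning curl again
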
function finders.curl(protocol,filename)
@@ -11214,6 +11717,8 @@ if not modules then modules = { } end modules ['data-lua'] = {
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
local gsub, insert = string.gsub, table.insert
local unpack = unpack or table.unpack
@@ -11242,7 +11747,7 @@ local function thepath(...)
local t = { ... } t[#t+1] = "?.lua"
local path = file.join(unpack(t))
if trace_locating then
- logs.report("fileio","! appending '%s' to 'package.path'",path)
+ report_resolvers("! appending '%s' to 'package.path'",path)
end
return path
end
@@ -11264,11 +11769,11 @@ local function loaded(libpaths,name,simple)
local libpath = libpaths[i]
local resolved = gsub(libpath,"%?",simple)
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
+ report_resolvers("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
end
if resolvers.isreadable.file(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'package.path': '%s'",name,resolved)
+ report_resolvers("! lib '%s' located via 'package.path': '%s'",name,resolved)
end
return loadfile(resolved)
end
@@ -11278,17 +11783,17 @@ end
package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- mode detail
- logs.report("fileio","! locating '%s'",name)
+ report_resolvers("! locating '%s'",name)
end
for i=1,#libformats do
local format = libformats[i]
local resolved = resolvers.find_file(name,format) or ""
if trace_locating then -- mode detail
- logs.report("fileio","! checking for '%s' using 'libformat path': '%s'",name,format)
+ report_resolvers("! checking for '%s' using 'libformat path': '%s'",name,format)
end
if resolved ~= "" then
if trace_locating then
- logs.report("fileio","! lib '%s' located via environment: '%s'",name,resolved)
+ report_resolvers("! lib '%s' located via environment: '%s'",name,resolved)
end
return loadfile(resolved)
end
@@ -11311,11 +11816,11 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
local path = paths[p]
local resolved = file.join(path,libname)
if trace_locating then -- mode detail
- logs.report("fileio","! checking for '%s' using 'clibformat path': '%s'",libname,path)
+ report_resolvers("! checking for '%s' using 'clibformat path': '%s'",libname,path)
end
if resolvers.isreadable.file(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'clibformat': '%s'",libname,resolved)
+ report_resolvers("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
end
return package.loadlib(resolved,name)
end
@@ -11325,28 +11830,28 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
local libpath = clibpaths[i]
local resolved = gsub(libpath,"?",simple)
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
+ report_resolvers("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
end
if resolvers.isreadable.file(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'package.cpath': '%s'",name,resolved)
+ report_resolvers("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
end
return package.loadlib(resolved,name)
end
end
-- just in case the distribution is messed up
if trace_locating then -- more detail

- logs.report("fileio","! checking for '%s' using 'luatexlibs': '%s'",name)
+ report_resolvers("! checking for '%s' using 'luatexlibs': '%s'",name)
end
local resolved = resolvers.find_file(file.basename(name),'luatexlibs') or ""
if resolved ~= "" then
if trace_locating then
- logs.report("fileio","! lib '%s' located by basename via environment: '%s'",name,resolved)
+ report_resolvers("! lib '%s' located by basename via environment: '%s'",name,resolved)
end
return loadfile(resolved)
end
if trace_locating then
- logs.report("fileio",'? unable to locate lib: %s',name)
+ report_resolvers('? unable to locate lib: %s',name)
end
-- return "unable to locate " .. name
end
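
-- Editorial sketch, not part of the patch; the module name is made up and the
-- clibformat loop is only partly visible in this diff. For require("foo.bar")
-- the replacement loader above tries, in order:
--
--~ -- 1. resolvers.find_file("foo.bar",format) for each entry in libformats
--~ -- 2. the library name joined with each clibformat path (package.loadlib)
--~ -- 3. "foo/bar" substituted into every package.cpath template
--~ -- 4. resolvers.find_file("bar",'luatexlibs') as a last resort
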
@@ -11358,113 +11863,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-kps'] = {
- version = 1.001,
- comment = "companion to luatools.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>This file is used when we want the input handlers to behave like
-<type>kpsewhich</type>. What to do with the following:</p>
-
-<typing>
-{$SELFAUTOLOC,$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}
-$SELFAUTOLOC : /usr/tex/bin/platform
-$SELFAUTODIR : /usr/tex/bin
-$SELFAUTOPARENT : /usr/tex
-</typing>
-
-<p>How about just forgetting about them?</p>
---ldx]]--
-
-local suffixes = resolvers.suffixes
-local formats = resolvers.formats
-
-suffixes['gf'] = { '<resolution>gf' }
-suffixes['pk'] = { '<resolution>pk' }
-suffixes['base'] = { 'base' }
-suffixes['bib'] = { 'bib' }
-suffixes['bst'] = { 'bst' }
-suffixes['cnf'] = { 'cnf' }
-suffixes['mem'] = { 'mem' }
-suffixes['mf'] = { 'mf' }
-suffixes['mfpool'] = { 'pool' }
-suffixes['mft'] = { 'mft' }
-suffixes['mppool'] = { 'pool' }
-suffixes['graphic/figure'] = { 'eps', 'epsi' }
-suffixes['texpool'] = { 'pool' }
-suffixes['PostScript header'] = { 'pro' }
-suffixes['ist'] = { 'ist' }
-suffixes['web'] = { 'web', 'ch' }
-suffixes['cweb'] = { 'w', 'web', 'ch' }
-suffixes['cmap files'] = { 'cmap' }
-suffixes['lig files'] = { 'lig' }
-suffixes['bitmap font'] = { }
-suffixes['MetaPost support'] = { }
-suffixes['TeX system documentation'] = { }
-suffixes['TeX system sources'] = { }
-suffixes['dvips config'] = { }
-suffixes['type42 fonts'] = { }
-suffixes['web2c files'] = { }
-suffixes['other text files'] = { }
-suffixes['other binary files'] = { }
-suffixes['opentype fonts'] = { 'otf' }
-
-suffixes['fmt'] = { 'fmt' }
-suffixes['texmfscripts'] = { 'rb','lua','py','pl' }
-
-suffixes['pdftex config'] = { }
-suffixes['Troff fonts'] = { }
-
-suffixes['ls-R'] = { }
-
---[[ldx--
-<p>If you wondered abou tsome of the previous mappings, how about
-the next bunch:</p>
---ldx]]--
-
-formats['bib'] = ''
-formats['bst'] = ''
-formats['mft'] = ''
-formats['ist'] = ''
-formats['web'] = ''
-formats['cweb'] = ''
-formats['MetaPost support'] = ''
-formats['TeX system documentation'] = ''
-formats['TeX system sources'] = ''
-formats['Troff fonts'] = ''
-formats['dvips config'] = ''
-formats['graphic/figure'] = ''
-formats['ls-R'] = ''
-formats['other text files'] = ''
-formats['other binary files'] = ''
-
-formats['gf'] = ''
-formats['pk'] = ''
-formats['base'] = 'MFBASES'
-formats['cnf'] = ''
-formats['mem'] = 'MPMEMS'
-formats['mf'] = 'MFINPUTS'
-formats['mfpool'] = 'MFPOOL'
-formats['mppool'] = 'MPPOOL'
-formats['texpool'] = 'TEXPOOL'
-formats['PostScript header'] = 'TEXPSHEADERS'
-formats['cmap files'] = 'CMAPFONTS'
-formats['type42 fonts'] = 'T42FONTS'
-formats['web2c files'] = 'WEB2C'
-formats['pdftex config'] = 'PDFTEXCONFIG'
-formats['texmfscripts'] = 'TEXMFSCRIPTS'
-formats['bitmap font'] = ''
-formats['lig files'] = 'LIGFONTS'
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['data-aux'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -11474,49 +11872,52 @@ if not modules then modules = { } end modules ['data-aux'] = {
}
local find = string.find
+local type, next = type, next
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix
local scriptpath = "scripts/context/lua"
newname = file.addsuffix(newname,"lua")
local oldscript = resolvers.clean_path(oldname)
if trace_locating then
- logs.report("fileio","to be replaced old script %s", oldscript)
+ report_resolvers("to be replaced old script %s", oldscript)
end
local newscripts = resolvers.find_files(newname) or { }
if #newscripts == 0 then
if trace_locating then
- logs.report("fileio","unable to locate new script")
+ report_resolvers("unable to locate new script")
end
else
for i=1,#newscripts do
local newscript = resolvers.clean_path(newscripts[i])
if trace_locating then
- logs.report("fileio","checking new script %s", newscript)
+ report_resolvers("checking new script %s", newscript)
end
if oldscript == newscript then
if trace_locating then
- logs.report("fileio","old and new script are the same")
+ report_resolvers("old and new script are the same")
end
elseif not find(newscript,scriptpath) then
if trace_locating then
- logs.report("fileio","new script should come from %s",scriptpath)
+ report_resolvers("new script should come from %s",scriptpath)
end
elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
if trace_locating then
- logs.report("fileio","invalid new script name")
+ report_resolvers("invalid new script name")
end
else
local newdata = io.loaddata(newscript)
if newdata then
if trace_locating then
- logs.report("fileio","old script content replaced by new content")
+ report_resolvers("old script content replaced by new content")
end
io.savedata(oldscript,newdata)
break
elseif trace_locating then
- logs.report("fileio","unable to load new script")
+ report_resolvers("unable to load new script")
end
end
end
@@ -11536,70 +11937,116 @@ if not modules then modules = { } end modules ['data-tmf'] = {
license = "see context related readme files"
}
-local find, gsub, match = string.find, string.gsub, string.match
-local getenv, setenv = os.getenv, os.setenv
+-- = <<
+-- ? ??
+-- < +=
+-- > =+
--- loads *.tmf files in minimal tree roots (to be optimized and documented)
+function resolvers.load_tree(tree)
+ if type(tree) == "string" and tree ~= "" then
-function resolvers.check_environment(tree)
- logs.simpleline()
- setenv('TMP', getenv('TMP') or getenv('TEMP') or getenv('TMPDIR') or getenv('HOME'))
- setenv('TEXOS', getenv('TEXOS') or ("texmf-" .. os.platform))
- setenv('TEXPATH', gsub(tree or "tex","\/+$",''))
- setenv('TEXMFOS', getenv('TEXPATH') .. "/" .. getenv('TEXOS'))
- logs.simpleline()
- logs.simple("preset : TEXPATH => %s", getenv('TEXPATH'))
- logs.simple("preset : TEXOS => %s", getenv('TEXOS'))
- logs.simple("preset : TEXMFOS => %s", getenv('TEXMFOS'))
- logs.simple("preset : TMP => %s", getenv('TMP'))
- logs.simple('')
-end
+ local getenv, setenv = resolvers.getenv, resolvers.setenv
-function resolvers.load_environment(name) -- todo: key=value as well as lua
- local f = io.open(name)
- if f then
- for line in f:lines() do
- if find(line,"^[%%%#]") then
- -- skip comment
- else
- local key, how, value = match(line,"^(.-)%s*([<=>%?]+)%s*(.*)%s*$")
- if how then
- value = gsub(value,"%%(.-)%%", function(v) return getenv(v) or "" end)
- if how == "=" or how == "<<" then
- setenv(key,value)
- elseif how == "?" or how == "??" then
- setenv(key,getenv(key) or value)
- elseif how == "<" or how == "+=" then
- if getenv(key) then
- setenv(key,getenv(key) .. io.fileseparator .. value)
- else
- setenv(key,value)
- end
- elseif how == ">" or how == "=+" then
- if getenv(key) then
- setenv(key,value .. io.pathseparator .. getenv(key))
- else
- setenv(key,value)
- end
- end
- end
- end
+ -- later might listen to the raw osenv var as well
+ local texos = "texmf-" .. os.platform
+
+ local oldroot = environment.texroot
+ local newroot = file.collapse_path(tree)
+
+ local newtree = file.join(newroot,texos)
+ local newpath = file.join(newtree,"bin")
+
+ if not lfs.isdir(newtree) then
+ logs.simple("no '%s' under tree %s",texos,tree)
+ os.exit()
end
- f:close()
+ if not lfs.isdir(newpath) then
+ logs.simple("no '%s/bin' under tree %s",texos,tree)
+ os.exit()
+ end
+
+ local texmfos = newtree
+
+ environment.texroot = newroot
+ environment.texos = texos
+ environment.texmfos = texmfos
+
+ setenv('SELFAUTOPARENT', newroot)
+ setenv('SELFAUTODIR', newtree)
+ setenv('SELFAUTOLOC', newpath)
+ setenv('TEXROOT', newroot)
+ setenv('TEXOS', texos)
+ setenv('TEXMFOS', texmfos)
+ setenv('TEXROOT', newroot)
+ setenv('TEXMFCNF', resolvers.luacnfspec)
+ setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+
+ logs.simple("changing from root '%s' to '%s'",oldroot,newroot)
+ logs.simple("prepending '%s' to binary path",newpath)
+ logs.simple()
end
end
-function resolvers.load_tree(tree)
- if tree and tree ~= "" then
- local setuptex = 'setuptex.tmf'
- if lfs.attributes(tree, "mode") == "directory" then -- check if not nil
- setuptex = tree .. "/" .. setuptex
- else
- setuptex = tree
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-lst'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- used in mtxrun
+
+local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+
+resolvers.listers = resolvers.listers or { }
+
+local function tabstr(str)
+ if type(str) == 'table' then
+ return concat(str," | ")
+ else
+ return str
+ end
+end
+
+local function list(list,report,pattern)
+ pattern = pattern and pattern ~= "" and upper(pattern) or ""
+ local instance = resolvers.instance
+ local report = report or texio.write_nl
+ local sorted = table.sortedkeys(list)
+ for i=1,#sorted do
+ local key = sorted[i]
+ if pattern == "" or find(upper(key),pattern) then
+ report(format('%s %s=%s',instance.origins[key] or "---",key,tabstr(list[key])))
end
- if io.exists(setuptex) then
- resolvers.check_environment(tree)
- resolvers.load_environment(setuptex)
+ end
+end
+
+function resolvers.listers.variables (report,pattern) list(resolvers.instance.variables, report,pattern) end
+function resolvers.listers.expansions(report,pattern) list(resolvers.instance.expansions,report,pattern) end
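+
+-- Editorial sketch, not part of the patch: these listers back the --variables and
+-- --expansions switches handled near the end of this file; the pattern is an example.
+--
+--~ resolvers.load("nofiles")
+--~ resolvers.listers.variables(false,"TEXMF") -- list variables whose name matches TEXMF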
+
+function resolvers.listers.configurations(report,pattern)
+ pattern = pattern and pattern ~= "" and upper(pattern) or ""
+ local report = report or texio.write_nl
+ local instance = resolvers.instance
+ local sorted = table.sortedkeys(instance.kpsevars)
+ for i=1,#sorted do
+ local key = sorted[i]
+ if pattern == "" or find(upper(key),pattern) then
+ report(format("%s\n",key))
+ local order = instance.order
+ for i=1,#order do
+ local str = order[i][key]
+ if str then
+ report(format("\t%s\t%s",i,str))
+ end
+ end
+ report("")
end
end
end
@@ -11708,111 +12155,140 @@ function states.get(key,default)
return states.get_by_tag(states.tag,key,default)
end
---~ states.data.update = {
---~ ["version"] = {
---~ ["major"] = 0,
---~ ["minor"] = 1,
---~ },
---~ ["rsync"] = {
---~ ["server"] = "contextgarden.net",
---~ ["module"] = "minimals",
---~ ["repository"] = "current",
---~ ["flags"] = "-rpztlv --stats",
---~ },
---~ ["tasks"] = {
---~ ["update"] = true,
---~ ["make"] = true,
---~ ["delete"] = false,
---~ },
---~ ["platform"] = {
---~ ["host"] = true,
---~ ["other"] = {
---~ ["mswin"] = false,
---~ ["linux"] = false,
---~ ["linux-64"] = false,
---~ ["osx-intel"] = false,
---~ ["osx-ppc"] = false,
---~ ["sun"] = false,
---~ },
---~ },
---~ ["context"] = {
---~ ["available"] = {"current", "beta", "alpha", "experimental"},
---~ ["selected"] = "current",
---~ },
---~ ["formats"] = {
---~ ["cont-en"] = true,
---~ ["cont-nl"] = true,
---~ ["cont-de"] = false,
---~ ["cont-cz"] = false,
---~ ["cont-fr"] = false,
---~ ["cont-ro"] = false,
---~ },
---~ ["engine"] = {
---~ ["pdftex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["pdftex"] = true,
---~ },
---~ },
---~ ["luatex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ },
---~ },
---~ ["xetex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["xetex"] = false,
---~ },
---~ },
---~ ["metapost"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["mpost"] = true,
---~ ["metafun"] = true,
---~ },
---~ },
---~ },
---~ ["fonts"] = {
---~ },
---~ ["doc"] = {
---~ },
---~ ["modules"] = {
---~ ["f-urwgaramond"] = false,
---~ ["f-urwgothic"] = false,
---~ ["t-bnf"] = false,
---~ ["t-chromato"] = false,
---~ ["t-cmscbf"] = false,
---~ ["t-cmttbf"] = false,
---~ ["t-construction-plan"] = false,
---~ ["t-degrade"] = false,
---~ ["t-french"] = false,
---~ ["t-lettrine"] = false,
---~ ["t-lilypond"] = false,
---~ ["t-mathsets"] = false,
---~ ["t-tikz"] = false,
---~ ["t-typearea"] = false,
---~ ["t-vim"] = false,
---~ },
---~ }
-
---~ states.save("teststate", "update")
---~ states.load("teststate", "update")
-
---~ print(states.get_by_tag("update","rsync.server","unknown"))
---~ states.set_by_tag("update","rsync.server","oeps")
---~ print(states.get_by_tag("update","rsync.server","unknown"))
---~ states.save("teststate", "update")
---~ states.load("teststate", "update")
---~ print(states.get_by_tag("update","rsync.server","unknown"))
+
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['luat-fmt'] = {
+ version = 1.001,
+ comment = "companion to mtxrun",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- helper for mtxrun
+
+function environment.make_format(name)
+ -- change to format path (early as we need expanded paths)
+ local olddir = lfs.currentdir()
+ local path = caches.getwritablepath("formats") or "" -- maybe platform
+ if path ~= "" then
+ lfs.chdir(path)
+ end
+ logs.simple("format path: %s",lfs.currentdir())
+ -- check source file
+ local texsourcename = file.addsuffix(name,"tex")
+ local fulltexsourcename = resolvers.find_file(texsourcename,"tex") or ""
+ if fulltexsourcename == "" then
+ logs.simple("no tex source file with name: %s",texsourcename)
+ lfs.chdir(olddir)
+ return
+ else
+ logs.simple("using tex source file: %s",fulltexsourcename)
+ end
+ local texsourcepath = dir.expand_name(file.dirname(fulltexsourcename)) -- really needed
+ -- check specification
+ local specificationname = file.replacesuffix(fulltexsourcename,"lus")
+ local fullspecificationname = resolvers.find_file(specificationname,"tex") or ""
+ if fullspecificationname == "" then
+ specificationname = file.join(texsourcepath,"context.lus")
+ fullspecificationname = resolvers.find_file(specificationname,"tex") or ""
+ end
+ if fullspecificationname == "" then
+ logs.simple("unknown stub specification: %s",specificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local specificationpath = file.dirname(fullspecificationname)
+ -- load specification
+ local usedluastub = nil
+ local usedlualibs = dofile(fullspecificationname)
+ if type(usedlualibs) == "string" then
+ usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs)
+ elseif type(usedlualibs) == "table" then
+ logs.simple("using stub specification: %s",fullspecificationname)
+ local texbasename = file.basename(name)
+ local luastubname = file.addsuffix(texbasename,"lua")
+ local lucstubname = file.addsuffix(texbasename,"luc")
+ -- pack libraries in stub
+ logs.simple("creating initialization file: %s",luastubname)
+ utils.merger.selfcreate(usedlualibs,specificationpath,luastubname)
+ -- compile stub file (does not save that much as we don't use this stub at startup any more)
+ local strip = resolvers.boolean_variable("LUACSTRIP", true)
+ if utils.lua.compile(luastubname,lucstubname,false,strip) and lfs.isfile(lucstubname) then
+ logs.simple("using compiled initialization file: %s",lucstubname)
+ usedluastub = lucstubname
+ else
+ logs.simple("using uncompiled initialization file: %s",luastubname)
+ usedluastub = luastubname
+ end
+ else
+ logs.simple("invalid stub specification: %s",fullspecificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ -- generate format
+ local q = string.quote
+ local command = string.format("luatex --ini --lua=%s %s %sdump",q(usedluastub),q(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
+ logs.simple("running command: %s\n",command)
+ os.spawn(command)
+ -- remove related mem files
+ local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
+ -- logs.simple("removing related mplib format with pattern '%s'", pattern)
+ local mp = dir.glob(pattern)
+ if mp then
+ for i=1,#mp do
+ local name = mp[i]
+ logs.simple("removing related mplib format %s", file.basename(name))
+ os.remove(name)
+ end
+ end
+ lfs.chdir(olddir)
+end
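+
+-- Editorial sketch, not part of the patch: mtxrun --make ends up here (see the
+-- "make"/"ini"/"compile" branch near the end of this file); the format name is an example.
+--
+--~ environment.make_format("cont-en")
+--~ -- locates cont-en.tex plus a *.lus stub specification and then spawns
+--~ -- luatex --ini --lua=<stub> cont-en.tex \dump in the writable format path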
+
+function environment.run_format(name,data,more)
+ -- hm, rather old code here; we can now use the file.whatever functions
+ if name and name ~= "" then
+ local barename = file.removesuffix(name)
+ local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
+ if fmtname == "" then
+ fmtname = resolvers.find_file(file.addsuffix(barename,"fmt")) or ""
+ end
+ fmtname = resolvers.clean_path(fmtname)
+ if fmtname == "" then
+ logs.simple("no format with name: %s",name)
+ else
+ local barename = file.removesuffix(name) -- expanded name
+ local luaname = file.addsuffix(barename,"luc")
+ if not lfs.isfile(luaname) then
+ luaname = file.addsuffix(barename,"lua")
+ end
+ if not lfs.isfile(luaname) then
+ logs.simple("using format name: %s",fmtname)
+ logs.simple("no luc/lua with name: %s",barename)
+ else
+ local q = string.quote
+ local command = string.format("luatex --fmt=%s --lua=%s %s %s",q(barename),q(luaname),q(data),more ~= "" and q(more) or "")
+ logs.simple("running command: %s",command)
+ os.spawn(command)
+ end
+ end
+ end
+end
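+
+-- Editorial sketch, not part of the patch; file names are examples. run_format
+-- pairs a fmt file with its luc/lua initialization file:
+--
+--~ environment.run_format("cont-en","myfile.tex","")
+--~ -- spawns: luatex --fmt="cont-en" --lua="cont-en.luc" "myfile.tex"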
end -- of closure
-- end library merge
-own = { } -- not local
+own = { } -- not local, might change
+
+own.libs = { -- order can be made better
-own.libs = { -- todo: check which ones are really needed
'l-string.lua',
'l-lpeg.lua',
'l-table.lua',
@@ -11825,24 +12301,32 @@ own.libs = { -- todo: check which ones are really needed
'l-url.lua',
'l-dir.lua',
'l-boolean.lua',
+ 'l-unicode.lua',
'l-math.lua',
--- 'l-unicode.lua',
--- 'l-tex.lua',
'l-utils.lua',
'l-aux.lua',
--- 'l-xml.lua',
+
+ 'trac-inf.lua',
+ 'trac-set.lua',
'trac-tra.lua',
+ 'trac-log.lua',
+ 'trac-pro.lua',
+ 'luat-env.lua', -- can come before inf (as in mkiv)
+
'lxml-tab.lua',
'lxml-lpt.lua',
--- 'lxml-ent.lua',
+ -- 'lxml-ent.lua',
'lxml-mis.lua',
'lxml-aux.lua',
'lxml-xml.lua',
- 'luat-env.lua',
- 'trac-inf.lua',
- 'trac-log.lua',
- 'data-res.lua',
+
+
+ 'data-ini.lua',
+ 'data-exp.lua',
+ 'data-env.lua',
'data-tmp.lua',
+ 'data-met.lua',
+ 'data-res.lua',
'data-pre.lua',
'data-inp.lua',
'data-out.lua',
@@ -11851,13 +12335,15 @@ own.libs = { -- todo: check which ones are really needed
-- 'data-tex.lua',
-- 'data-bin.lua',
'data-zip.lua',
+ 'data-tre.lua',
'data-crl.lua',
'data-lua.lua',
- 'data-kps.lua', -- so that we can replace kpsewhich
'data-aux.lua', -- updater
- 'data-tmf.lua', -- tree files
- -- needed ?
- 'luat-sta.lua', -- states
+ 'data-tmf.lua',
+ 'data-lst.lua',
+
+ 'luat-sta.lua',
+ 'luat-fmt.lua',
}
-- We need this hack till luatex is fixed.
@@ -11870,36 +12356,61 @@ end
-- End of hack.
-own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua'
+own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
+own.path = string.gsub(string.match(own.name,"^(.+)[\\/].-$") or ".","\\","/")
+
+local ownpath, owntree = own.path, environment and environment.ownpath or own.path
+
+own.list = {
+ '.',
+ ownpath ,
+ ownpath .. "/../sources", -- HH's development path
+ owntree .. "/../../texmf-local/tex/context/base",
+ owntree .. "/../../texmf-context/tex/context/base",
+ owntree .. "/../../texmf-dist/tex/context/base",
+ owntree .. "/../../texmf/tex/context/base",
+ owntree .. "/../../../texmf-local/tex/context/base",
+ owntree .. "/../../../texmf-context/tex/context/base",
+ owntree .. "/../../../texmf-dist/tex/context/base",
+ owntree .. "/../../../texmf/tex/context/base",
+}
+if own.path == "." then table.remove(own.list,1) end
-own.path = string.match(own.name,"^(.+)[\\/].-$") or "."
-own.list = { '.' }
-if own.path ~= '.' then
- table.insert(own.list,own.path)
+local function locate_libs()
+ for l=1,#own.libs do
+ local lib = own.libs[l]
+ for p =1,#own.list do
+ local pth = own.list[p]
+ local filename = pth .. "/" .. lib
+ local found = lfs.isfile(filename)
+ if found then
+ return pth
+ end
+ end
+ end
end
-table.insert(own.list,own.path.."/../../../tex/context/base")
-table.insert(own.list,own.path.."/mtx")
-table.insert(own.list,own.path.."/../sources")
-local function locate_libs()
- for _, lib in pairs(own.libs) do
- for _, pth in pairs(own.list) do
- local filename = string.gsub(pth .. "/" .. lib,"\\","/")
+local function load_libs()
+ local found = locate_libs()
+ if found then
+ for l=1,#own.libs do
+ local filename = found .. "/" .. own.libs[l]
local codeblob = loadfile(filename)
if codeblob then
codeblob()
- own.list = { pth } -- speed up te search
- break
end
end
+ else
+ resolvers = nil
end
end
if not resolvers then
- locate_libs()
+ load_libs()
end
+
if not resolvers then
print("")
print("Mtxrun is unable to start up due to lack of libraries. You may")
@@ -11909,7 +12420,11 @@ if not resolvers then
os.exit()
end
-logs.setprogram('MTXrun',"TDS Runner Tool 1.24",environment.arguments["verbose"] or false)
+logs.setprogram('MTXrun',"TDS Runner Tool 1.26")
+
+if environment.arguments["verbose"] then
+ trackers.enable("resolvers.locating")
+end
local instance = resolvers.reset()
@@ -11937,8 +12452,8 @@ messages.help = [[
--ifchanged=filename only execute when given file has changed (md checksum)
--iftouched=old,new only execute when given file has changed (time stamp)
---make create stubs for (context related) scripts
---remove remove stubs (context related) scripts
+--makestubs create stubs for (context related) scripts
+--removestubs remove stubs (context related) scripts
--stubpath=binpath paths where stubs will be written
--windows create windows (mswin) stubs
--unix create unix (linux) stubs
@@ -11958,8 +12473,24 @@ messages.help = [[
--forcekpse force using kpse (handy when no mkiv and cache installed but less functionality)
--prefixes show supported prefixes
+
+--generate generate file database
+
+--variables show configuration variables
+--expansions show expanded variables
+--configurations show configuration order
+--expand-braces expand complex variable
+--expand-path expand variable (resolve paths)
+--expand-var expand variable (resolve references)
+--show-path show path expansion of ...
+--var-value report value of variable
+--find-file report file location
+--find-path report path of file
+
+--pattern=str filter variables
]]
+
runners.applications = {
["lua"] = "luatex --luaonly",
["luc"] = "luatex --luaonly",
@@ -12012,45 +12543,40 @@ end
function runners.prepare()
local checkname = environment.argument("ifchanged")
- if checkname and checkname ~= "" then
+ local verbose = environment.argument("verbose")
+ if type(checkname) == "string" and checkname ~= "" then
local oldchecksum = file.loadchecksum(checkname)
local newchecksum = file.checksum(checkname)
if oldchecksum == newchecksum then
- logs.simple("file '%s' is unchanged",checkname)
+ if verbose then
+ logs.simple("file '%s' is unchanged",checkname)
+ end
return "skip"
- else
+ elseif verbose then
logs.simple("file '%s' is changed, processing started",checkname)
end
file.savechecksum(checkname)
end
- local oldname, newname = string.split(environment.argument("iftouched") or "", ",")
- if oldname and newname and oldname ~= "" and newname ~= "" then
- if not file.needs_updating(oldname,newname) then
- logs.simple("file '%s' and '%s' have same age",oldname,newname)
- return "skip"
- else
- logs.simple("file '%s' is older than '%s'",oldname,newname)
- end
- end
- local tree = environment.argument('tree') or ""
- if environment.argument('autotree') then
- tree = os.getenv('TEXMFSTART_TREE') or os.getenv('TEXMFSTARTTREE') or tree
- end
- if tree and tree ~= "" then
- resolvers.load_tree(tree)
- end
- local env = environment.argument('environment') or ""
- if env and env ~= "" then
- for _,e in pairs(string.split(env)) do
- -- maybe force suffix when not given
- resolvers.load_tree(e)
+ local touchname = environment.argument("iftouched")
+ if type(touchname) == "string" and touchname ~= "" then
+ local oldname, newname = string.split(touchname, ",")
+ if oldname and newname and oldname ~= "" and newname ~= "" then
+ if not file.needs_updating(oldname,newname) then
+ if verbose then
+ logs.simple("file '%s' and '%s' have same age",oldname,newname)
+ end
+ return "skip"
+ elseif verbose then
+ logs.simple("file '%s' is older than '%s'",oldname,newname)
+ end
end
end
local runpath = environment.argument("path")
- if runpath and not lfs.chdir(runpath) then
+ if type(runpath) == "string" and not lfs.chdir(runpath) then
logs.simple("unable to change to path '%s'",runpath)
return "error"
end
+ runners.prepare = function() end
return "run"
end
@@ -12165,7 +12691,7 @@ function runners.execute_program(fullname)
return false
end
--- the --usekpse flag will fallback on kpse (hm, we can better update mtx-stubs)
+-- the --usekpse flag will fall back (not by default) on kpse (hm, we had better update mtx-stubs)
local windows_stub = '@echo off\013\010setlocal\013\010set ownpath=%%~dp0%%\013\010texlua "%%ownpath%%mtxrun.lua" --usekpse --execute %s %%*\013\010endlocal\013\010'
local unix_stub = '#!/bin/sh\010mtxrun --usekpse --execute %s \"$@\"\010'
@@ -12288,7 +12814,7 @@ end
function runners.launch_file(filename)
instance.allresults = true
- logs.setverbose(true)
+ trackers.enable("resolvers.locating")
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -12368,7 +12894,19 @@ function runners.find_mtx_script(filename)
return fullname
end
-function runners.execute_ctx_script(filename)
+function runners.register_arguments(...)
+ local arguments = environment.arguments_after
+ local passedon = { ... }
+ for i=#passedon,1,-1 do
+ local pi = passedon[i]
+ if pi then
+ table.insert(arguments,1,pi)
+ end
+ end
+end
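+
+-- Editorial note, not part of the patch; argument values are made up. Passed-on
+-- arguments are inserted at position 1 in reverse order, so they end up in their
+-- original order in front of what was already in environment.arguments_after:
+--
+--~ -- environment.arguments_after == { "last" }
+--~ runners.register_arguments("--first","--second")
+--~ -- environment.arguments_after == { "--first", "--second", "last" }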
+
+function runners.execute_ctx_script(filename,...)
+ runners.register_arguments(...)
local arguments = environment.arguments_after
local fullname = runners.find_mtx_script(filename) or ""
if file.extname(fullname) == "cld" then
@@ -12381,7 +12919,7 @@ function runners.execute_ctx_script(filename)
-- retry after generate but only if --autogenerate
if fullname == "" and environment.argument("autogenerate") then -- might become the default
instance.renewcache = true
- logs.setverbose(true)
+ trackers.enable("resolvers.locating")
resolvers.load()
--
fullname = runners.find_mtx_script(filename) or ""
@@ -12421,10 +12959,9 @@ function runners.execute_ctx_script(filename)
return true
end
else
- -- logs.setverbose(true)
if filename == "" or filename == "help" then
local context = resolvers.find_file("mtx-context.lua")
- logs.setverbose(true)
+ trackers.enable("resolvers.locating")
if context ~= "" then
local result = dir.glob((string.gsub(context,"mtx%-context","mtx-*"))) -- () needed
local valid = { }
@@ -12558,80 +13095,317 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
end
+ function runners.loadbase()
+ end
+
else
- resolvers.load()
+ function runners.loadbase(...)
+ if not resolvers.load(...) then
+ logs.simple("forcing cache reload")
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ if not resolvers.load(...) then
+ logs.simple("the resolver databases are not present or outdated")
+ end
+ end
+ end
end
+resolvers.load_tree(environment.argument('tree'))
+
if environment.argument("selfmerge") then
+
-- embed used libraries
- utils.merger.selfmerge(own.name,own.libs,own.list)
+
+ runners.loadbase()
+ local found = locate_libs()
+ if found then
+ utils.merger.selfmerge(own.name,own.libs,{ found })
+ end
+
elseif environment.argument("selfclean") then
+
-- remove embedded libraries
+
+ runners.loadbase()
utils.merger.selfclean(own.name)
+
elseif environment.argument("selfupdate") then
- logs.setverbose(true)
+
+ runners.loadbase()
+ trackers.enable("resolvers.locating")
resolvers.update_script(own.name,"mtxrun")
+
elseif environment.argument("ctxlua") or environment.argument("internal") then
+
-- run a script by loading it (using libs)
+
+ runners.loadbase()
ok = runners.execute_script(filename,true)
+
elseif environment.argument("script") or environment.argument("scripts") then
+
-- run a script by loading it (using libs), pass args
+
+ runners.loadbase()
if is_mkii_stub then
- -- execute mkii script
ok = runners.execute_script(filename,false,true)
else
ok = runners.execute_ctx_script(filename)
end
+
elseif environment.argument("execute") then
+
-- execute script
+
+ runners.loadbase()
ok = runners.execute_script(filename)
+
elseif environment.argument("direct") then
+
-- equals bin:
+
+ runners.loadbase()
ok = runners.execute_program(filename)
+
elseif environment.argument("edit") then
+
-- edit file
+
+ runners.loadbase()
runners.edit_script(filename)
+
elseif environment.argument("launch") then
+
+ runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("make") then
- -- make stubs
+
+elseif environment.argument("makestubs") then
+
+ -- make stubs (deprecated)
+
runners.handle_stubs(true)
-elseif environment.argument("remove") then
- -- remove stub
+
+elseif environment.argument("removestubs") then
+
+ -- remove stubs (deprecated)
+
+ runners.loadbase()
runners.handle_stubs(false)
+
elseif environment.argument("resolve") then
+
-- resolve string
+
+ runners.loadbase()
runners.resolve_string(filename)
+
elseif environment.argument("locate") then
+
-- locate file
+
+ runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform")then
+
+elseif environment.argument("platform") or environment.argument("show-platform") then
+
-- locate platform
+
+ runners.loadbase()
runners.locate_platform()
+
elseif environment.argument("prefixes") then
+
+ runners.loadbase()
runners.prefixes()
+
elseif environment.argument("timedrun") then
+
-- locate platform
+
+ runners.loadbase()
runners.timedrun(filename)
+
+elseif environment.argument("variables") or environment.argument("show-variables") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--variables",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.variables(false,environment.argument("pattern"))
+
+elseif environment.argument("expansions") or environment.argument("show-expansions") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.expansions(false,environment.argument("pattern"))
+
+elseif environment.argument("configurations") or environment.argument("show-configurations") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.configurations(false,environment.argument("pattern"))
+
+elseif environment.argument("find-file") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
+
+ resolvers.load()
+ local pattern = environment.argument("pattern")
+ local format = environment.arguments["format"] or instance.format
+ if not pattern then
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.find_files,environment.files,format)
+ elseif type(pattern) == "string" then
+ instance.allresults = true -- brrrr
+ resolvers.for_files(resolvers.find_files,{ pattern }, format)
+ end
+
+elseif environment.argument("find-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
+
+ resolvers.load()
+ local path = resolvers.find_path(filename, instance.my_format)
+ if logs.verbose then
+ logs.simple(path)
+ else
+ print(path)
+ end
+
+elseif environment.argument("expand-braces") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.expand_braces, environment.files)
+
+elseif environment.argument("expand-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.expand_path, environment.files)
+
+elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.expand_var, environment.files)
+
+elseif environment.argument("show-path") or environment.argument("path-value") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.show_path, environment.files)
+
+elseif environment.argument("var-value") or environment.argument("show-value") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initialize_arguments(environment.arguments_after)
+ resolvers.for_files(resolvers.var_value,environment.files)
+
+elseif environment.argument("format-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
+
+ resolvers.load()
+ logs.simple(caches.getwritablepath("format"))
+
+elseif environment.argument("pattern") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+
+elseif environment.argument("generate") then
+
+ -- luatools
+
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+
+elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
+
+ resolvers.load()
+ trackers.enable("resolvers.locating")
+ environment.make_format(filename)
+
+elseif environment.argument("run") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--run",filename)
+
+elseif environment.argument("fmt") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--fmt",filename)
+
+elseif environment.argument("help") and filename=='base' then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--help")
+
elseif environment.argument("help") or filename=='help' or filename == "" then
+
logs.help(messages.help)
- -- execute script
+
elseif filename:find("^bin:") then
+
+ runners.loadbase()
ok = runners.execute_program(filename)
+
elseif is_mkii_stub then
+
-- execute mkii script
+
+ runners.loadbase()
ok = runners.execute_script(filename,false,true)
-else
+
+elseif false then
+
+ runners.loadbase()
ok = runners.execute_ctx_script(filename)
if not ok then
ok = runners.execute_script(filename)
end
+
+else
+
+ runners.execute_ctx_script("mtx-base",filename)
+
+end
+
+if logs.verbose then
+ logs.simpleline()
+ logs.simple("runtime: %0.3f seconds",os.runtime())
end
-if os.platform == "unix" then
- io.write("\n")
+if os.type ~= "windows" then
+ texio.write("\n")
end
if ok == false then ok = 1 elseif ok == true then ok = 0 end
diff --git a/tex/context/base/attr-div.lua b/tex/context/base/attr-div.lua
new file mode 100644
index 000000000..2e0e34e6a
--- /dev/null
+++ b/tex/context/base/attr-div.lua
@@ -0,0 +1,649 @@
+if not modules then modules = { } end modules ['attr-div'] = {
+ version = 1.001,
+ comment = "companion to attr-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this module is being reconstructed and code will move to other places
+-- we can also do the nsnone via a metatable and then also use index 0
+
+local type = type
+local format, gmatch = string.format, string.gmatch
+local concat = table.concat
+local texsprint = tex.sprint
+
+local report_attributes = logs.new("attributes")
+
+local ctxcatcodes = tex.ctxcatcodes
+local unsetvalue = attributes.unsetvalue
+
+-- todo: document this but first reimplement this as it reflects the early
+-- days of luatex / mkiv and we have better ways now
+
+-- nb: attributes: color etc is much slower than normal (marks + literals) but ...
+-- nb. too many "0 g"s
+
+nodes = nodes or { }
+states = states or { }
+shipouts = shipouts or { }
+
+-- We can distinguish between rules and glyphs but it's not worth the trouble. A
+-- first implementation did that and while it saves a bit for glyphs and rules, it
+-- costs more resources for transparencies. So why bother.
+
+--
+-- colors
+--
+
+-- we can also collapse the two attributes: n, n+1, n+2 and then
+-- at the tex end add 0, 1, 2, but this is not faster and is less
+-- flexible (since sometimes we freeze color attribute values at
+-- the lua end of the game)
+--
+-- we also need to store the colorvalues because we need them in mp
+--
+-- This is a compromise between speed and simplicity. We used to store the
+-- values and data in one array, which made it necessary to store the
+-- converters that need the node constructor into strings and evaluate them
+-- at runtime (after reading from storage). Think of:
+--
+-- colors.strings = colors.strings or { }
+--
+-- if environment.initex then
+-- colors.strings[color] = "return colors." .. colorspace .. "(" .. concat({...},",") .. ")"
+-- end
+--
+-- storage.register("colors/data", colors.strings, "colors.data") -- evaluated
+--
+-- We assume that only processcolors are defined in the format.
+
+colors = colors or { }
+colors.data = colors.data or { }
+colors.values = colors.values or { }
+colors.registered = colors.registered or { }
+
+colors.weightgray = true
+colors.attribute = attributes.private('color')
+colors.selector = attributes.private('colormodel')
+colors.default = 1
+colors.main = nil
+colors.triggering = true
+
+storage.register("colors/values", colors.values, "colors.values")
+storage.register("colors/registered", colors.registered, "colors.registered")
+
+local templates = {
+ rgb = "r:%s:%s:%s",
+ cmyk = "c:%s:%s:%s:%s",
+ gray = "s:%s",
+ spot = "p:%s:%s:%s:%s"
+}
+
+local models = {
+ [interfaces.variables.none] = unsetvalue,
+ black = unsetvalue,
+ bw = unsetvalue,
+ all = 1,
+ gray = 2,
+ rgb = 3,
+ cmyk = 4,
+}
+
+colors.model = "all"
+
+local data = colors.data
+local values = colors.values
+local registered = colors.registered
+
+local numbers = attributes.numbers
+local list = attributes.list
+
+local min, max, floor = math.min, math.max, math.floor
+
+local nodeinjections = backends.nodeinjections
+local codeinjections = backends.codeinjections
+local registrations = backends.registrations
+
+local function rgbtocmyk(r,g,b) -- we could reduce
+ return 1-r, 1-g, 1-b, 0
+end
+
+local function cmyktorgb(c,m,y,k)
+ return 1.0 - min(1.0,c+k), 1.0 - min(1.0,m+k), 1.0 - min(1.0,y+k)
+end
+
+local function rgbtogray(r,g,b)
+ if colors.weightgray then
+ return .30*r+.59*g+.11*b
+ else
+ return r/3+g/3+b/3
+ end
+end
+
+local function cmyktogray(c,m,y,k)
+ return rgbtogray(cmyktorgb(c,m,y,k))
+end
+
+-- http://en.wikipedia.org/wiki/HSI_color_space
+-- http://nl.wikipedia.org/wiki/HSV_(kleurruimte)
+
+local function hsvtorgb(h,s,v)
+ -- h = h % 360
+ local hd = h/60
+ local hf = floor(hd)
+ local hi = hf % 6
+ -- local f = hd - hi
+ local f = hd - hf
+ local p = v * (1 - s)
+ local q = v * (1 - f * s)
+ local t = v * (1 - (1 - f) * s)
+ if hi == 0 then
+ return v, t, p
+ elseif hi == 1 then
+ return q, v, p
+ elseif hi == 2 then
+ return p, v, t
+ elseif hi == 3 then
+ return p, q, v
+ elseif hi == 4 then
+ return t, p, v
+ elseif hi == 5 then
+ return v, p, q
+ else
+ print("error in hsv -> rgb",hi,h,s,v)
+ end
+end
+
+local function rgbtohsv(r,g,b)
+ local offset, maximum, other_1, other_2
+ if r >= g and r >= b then
+ offset, maximum, other_1, other_2 = 0, r, g, b
+ elseif g >= r and g >= b then
+ offset, maximum, other_1, other_2 = 2, g, b, r
+ else
+ offset, maximum, other_1, other_2 = 4, b, r, g
+ end
+ if maximum == 0 then
+ return 0, 0, 0
+ end
+ local minimum = other_1 < other_2 and other_1 or other_2
+ if maximum == minimum then
+ return 0, 0, maximum
+ end
+ local delta = maximum - minimum
+ return (offset + (other_1-other_2)/delta)*60, delta/maximum, maximum
+end
+
+local function graytorgb(s) -- unweighted
+ return 1-s, 1-s, 1-s
+end
+
+local function hsvtogray(h,s,v)
+ return rgbtogray(hsvtorgb(h,s,v))
+end
+
+local function graytohsv(s)
+ return 0, 0, s
+end
+
+colors.rgbtocmyk = rgbtocmyk
+colors.rgbtogray = rgbtogray
+colors.cmyktorgb = cmyktorgb
+colors.cmyktogray = cmyktogray
+colors.rgbtohsv = rgbtohsv
+colors.hsvtorgb = hsvtorgb
+colors.hsvtogray = hsvtogray
+colors.graytohsv = graytohsv
+
+-- we can share some *data by using s, rgb and cmyk hashes, but
+-- normally the amount of colors is not that large; storing the
+-- components costs a bit of extra runtime, but we expect to gain
+-- some back because we have them at hand; the number indicates the
+-- default color space
+
+function colors.gray(s)
+ return { 2, s, s, s, s, 0, 0, 0, 1-s }
+end
+
+function colors.rgb(r,g,b)
+ local s = rgbtogray(r,g,b)
+ local c, m, y, k = rgbtocmyk(r,g,b)
+ return { 3, s, r, g, b, c, m, y, k }
+end
+
+function colors.cmyk(c,m,y,k)
+ local s = cmyktogray(c,m,y,k)
+ local r, g, b = cmyktorgb(c,m,y,k)
+ return { 4, s, r, g, b, c, m, y, k }
+end
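+
+-- Editorial note, not part of the patch: the shared value layout is
+-- { model, s, r, g, b, c, m, y, k }, for example:
+--
+--~ colors.rgb (1,0,0) -- { 3, .30, 1, 0, 0, 0, 1, 1, 0 }
+--~ colors.gray( .5 ) -- { 2, .5, .5, .5, .5, 0, 0, 0, .5 }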
+
+--~ function colors.spot(parent,f,d,p)
+--~ return { 5, .5, .5, .5, .5, 0, 0, 0, .5, parent, f, d, p }
+--~ end
+
+function colors.spot(parent,f,d,p)
+ if type(p) == "number" then
+ local n = list[numbers.color][parent] -- hard coded ref to color number
+ if n then
+ local v = values[n]
+ if v then
+ -- the via cmyk hack is dirty, but it scales better
+ local c, m, y, k = p*v[6], p*v[7], p*v[8], p*v[9]
+ local r, g, b = cmyktorgb(c,m,y,k)
+ local s = cmyktogray(c,m,y,k)
+ return { 5, s, r, g, b, c, m, y, k, parent, f, d, p }
+ end
+ end
+ else
+ -- todo, multitone (maybe p should be a table)
+ end
+ return { 5, .5, .5, .5, .5, 0, 0, 0, .5, parent, f, d, p }
+end
+
+local function graycolor(...) graycolor = nodeinjections.graycolor return graycolor(...) end
+local function rgbcolor (...) rgbcolor = nodeinjections.rgbcolor return rgbcolor (...) end
+local function cmykcolor(...) cmykcolor = nodeinjections.cmykcolor return cmykcolor(...) end
+local function spotcolor(...) spotcolor = nodeinjections.spotcolor return spotcolor(...) end
+
+local function extender(colors,key)
+ if key == "none" then
+ local d = graycolor(0)
+ colors.none = d
+ return d
+ end
+end
+
+local function reviver(data,n)
+ local v = values[n]
+ local d
+ if not v then
+ local gray = graycolor(0)
+ d = { gray, gray, gray, gray }
+ report_attributes("unable to revive color %s",n or "?")
+ else
+ local kind = v[1]
+ if kind == 2 then
+ local gray= graycolor(v[2])
+ d = { gray, gray, gray, gray }
+ elseif kind == 3 then
+ local gray, rgb, cmyk = graycolor(v[2]), rgbcolor(v[3],v[4],v[5]), cmykcolor(v[6],v[7],v[8],v[9])
+ d = { rgb, gray, rgb, cmyk }
+ elseif kind == 4 then
+ local gray, rgb, cmyk = graycolor(v[2]), rgbcolor(v[3],v[4],v[5]), cmykcolor(v[6],v[7],v[8],v[9])
+ d = { cmyk, gray, rgb, cmyk }
+ elseif kind == 5 then
+ local spot = spotcolor(v[10],v[11],v[12],v[13])
+ -- d = { spot, gray, rgb, cmyk }
+ d = { spot, spot, spot, spot }
+ end
+ end
+ data[n] = d
+ return d
+end
+
+setmetatable(colors, { __index = extender })
+setmetatable(colors.data, { __index = reviver })
+
+function colors.filter(n)
+ return concat(data[n],":",5)
+end
+
+function colors.setmodel(name,weightgray)
+ colors.model = name
+ colors.default = models[name] or 1
+ colors.weightgray = weightgray ~= false
+ return colors.default
+end
+
+function colors.register(name, colorspace, ...) -- passing 9 vars is faster (but not called that often)
+ local stamp = format(templates[colorspace],...)
+ local color = registered[stamp]
+ if not color then
+ color = #values + 1
+ values[color] = colors[colorspace](...)
+ registered[stamp] = color
+ -- colors.reviver(color)
+ end
+ if name then
+ list[colors.attribute][name] = color -- not grouped, so only global colors
+ end
+ return registered[stamp]
+end
+
+function colors.value(id)
+ return values[id]
+end
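+
+-- Editorial sketch, not part of the patch; the color name is an example.
+-- Registration is idempotent per stamp, so the same rgb triplet always yields
+-- the same index:
+--
+--~ local n = colors.register("darkred","rgb",.5,0,0)
+--~ colors.value(n) -- { 3, .15, .5, 0, 0, .5, 1, 1, 0 }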
+
+shipouts.handle_color = nodes.install_attribute_handler {
+ name = "color",
+ namespace = colors,
+ initializer = states.initialize,
+ finalizer = states.finalize,
+ processor = states.selective,
+ resolver = function() return colors.main end,
+}
+
+function colors.enable()
+ tasks.enableaction("shipouts","shipouts.handle_color")
+end
+
+-- transparencies
+
+transparencies = transparencies or { }
+transparencies.registered = transparencies.registered or { }
+transparencies.data = transparencies.data or { }
+transparencies.values = transparencies.values or { }
+transparencies.triggering = true
+transparencies.attribute = attributes.private('transparency')
+
+storage.register("transparencies/registered", transparencies.registered, "transparencies.registered")
+storage.register("transparencies/values", transparencies.values, "transparencies.values")
+
+local registered = transparencies.registered -- we could use a 2 dimensional table instead
+local data = transparencies.data
+local values = transparencies.values
+local template = "%s:%s"
+
+local function inject_transparency (...)
+ inject_transparency = nodeinjections.transparency
+ return inject_transparency(...)
+end
+
+local function register_transparency(...)
+ register_transparency = registrations.transparency
+ return register_transparency(...)
+end
+
+function transparencies.register(name,a,t,force) -- name is irrelevant here (can even be nil)
+ -- The force flag is needed for the metapost converter. We could always
+ -- force, but then we would end up with transparency resources even when
+ -- transparencies are only defined and never used. This is somewhat
+ -- messy.
+ local stamp = format(template,a,t)
+ local n = registered[stamp]
+ if not n then
+ n = #values + 1
+ values[n] = { a, t }
+ registered[stamp] = n
+ if force then
+ register_transparency(n,a,t)
+ end
+ elseif force and not data[n] then
+ register_transparency(n,a,t)
+ end
+ return registered[stamp]
+end
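+
+-- Example usage (illustration only): the first number selects the transparency
+-- alternative, the second the value; the optional fourth argument forces
+-- immediate backend registration, as the metapost converter needs:
+--
+--~ local n = transparencies.register(nil,1,.5)
+--~ -- transparencies.value(n) --> { 1, .5 }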
+
+local function extender(transparencies,key)
+ if key == "none" then
+ local d = inject_transparency(0)
+ transparencies.none = d
+ return d
+ end
+end
+
+local function reviver(data,n)
+ local v = values[n]
+ local d
+ if not v then
+ d = inject_transparency(0)
+ else
+ d = inject_transparency(n)
+ register_transparency(n,v[1],v[2])
+ end
+ data[n] = d
+ return d
+end
+
+setmetatable(transparencies, { __index = extender })
+setmetatable(transparencies.data, { __index = reviver }) -- register if used
+
+-- check if there is an identity
+
+function transparencies.value(id)
+ return values[id]
+end
+
+shipouts.handle_transparency = nodes.install_attribute_handler {
+ name = "transparency",
+ namespace = transparencies,
+ initializer = states.initialize,
+ finalizer = states.finalize,
+ processor = states.process,
+}
+
+function transparencies.enable()
+ tasks.enableaction("shipouts","shipouts.handle_transparency")
+end
+
+--- colorintents: overprint / knockout
+
+colorintents = colorintents or { }
+colorintents.data = colorintents.data or { }
+colorintents.attribute = attributes.private('colorintent')
+
+colorintents.registered = {
+ overprint = 1,
+ knockout = 2,
+}
+
+local data, registered = colorintents.data, colorintents.registered
+
+local function extender(colorintents,key)
+ if key == "none" then
+ local d = data[2]
+ colorintents.none = d
+ return d
+ end
+end
+
+local function reviver(data,n)
+ if n == 1 then
+ local d = nodeinjections.overprint() -- called once
+ data[1] = d
+ return d
+ elseif n == 2 then
+ local d = nodeinjections.knockout() -- called once
+ data[2] = d
+ return d
+ end
+end
+
+setmetatable(colorintents, { __index = extender })
+setmetatable(colorintents.data, { __index = reviver })
+
+function colorintents.register(stamp)
+ return registered[stamp] or registered.overprint
+end
+
+shipouts.handle_colorintent = nodes.install_attribute_handler {
+ name = "colorintent",
+ namespace = colorintents,
+ initializer = states.initialize,
+ finalizer = states.finalize,
+ processor = states.process,
+}
+
+function colorintents.enable()
+ tasks.enableaction("shipouts","shipouts.handle_colorintent")
+end
+
+--- negative / positive
+
+negatives = negatives or { }
+negatives.data = negatives.data or { }
+negatives.attribute = attributes.private("negative")
+
+negatives.registered = {
+ positive = 1,
+ negative = 2,
+}
+
+local data, registered = negatives.data, negatives.registered
+
+local function extender(negatives,key)
+ if key == "none" then
+ local d = data[1]
+ negatives.none = d
+ return d
+ end
+end
+
+local function reviver(data,n)
+ if n == 1 then
+ local d = nodeinjections.positive() -- called once
+ data[1] = d
+ return d
+ elseif n == 2 then
+ local d = nodeinjections.negative() -- called once
+ data[2] = d
+ return d
+ end
+end
+
+setmetatable(negatives, { __index = extender })
+setmetatable(negatives.data, { __index = reviver })
+
+function negatives.register(stamp)
+ return registered[stamp] or registered.positive
+end
+
+shipouts.handle_negative = nodes.install_attribute_handler {
+ name = "negative",
+ namespace = negatives,
+ initializer = states.initialize,
+ finalizer = states.finalize,
+ processor = states.process,
+}
+
+function negatives.enable()
+ tasks.enableaction("shipouts","shipouts.handle_negative")
+end
+
+-- effects -- can be optimized (todo: metatables)
+
+effects = effects or { }
+effects.data = effects.data or { }
+effects.values = effects.values or { }
+effects.registered = effects.registered or { }
+effects.stamp = "%s:%s:%s"
+effects.attribute = attributes.private("effect")
+
+storage.register("effects/registered", effects.registered, "effects.registered")
+storage.register("effects/values", effects.values, "effects.values")
+
+local data, registered, values = effects.data, effects.registered, effects.values
+
+-- valid effects: normal inner outer both hidden (stretch,rulethickness,effect)
+
+local function effect(...) effect = nodeinjections.effect return effect(...) end
+
+local function extender(effects,key)
+ if key == "none" then
+ local d = effect(0,0,0)
+ effects.none = d
+ return d
+ end
+end
+
+local function reviver(data,n)
+ local e = values[n] -- we could nil values[n] now but hardly needed
+ local d = effect(e[1],e[2],e[3])
+ data[n] = d
+ return d
+end
+
+setmetatable(effects, { __index = extender })
+setmetatable(effects.data, { __index = reviver })
+
+function effects.register(effect,stretch,rulethickness)
+ local stamp = format(effects.stamp,effect,stretch,rulethickness)
+ local n = registered[stamp]
+ if not n then
+ n = #values + 1
+ values[n] = { effect, stretch, rulethickness }
+ registered[stamp] = n
+ end
+ return n
+end
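+
+-- Example usage (illustration only); the arguments are one of the effects
+-- listed above, the stretch and the rulethickness in scaled points:
+--
+--~ local n = effects.register("outer",0,65536) -- 65536sp = 1pt
+--~ -- values[n] --> { "outer", 0, 65536 }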
+
+shipouts.handle_effect = nodes.install_attribute_handler {
+ name = "effect",
+ namespace = effects,
+ initializer = states.initialize,
+ finalizer = states.finalize,
+ processor = states.process,
+}
+
+function effects.enable()
+ tasks.enableaction("shipouts","shipouts.handle_effect")
+end
+
+-- layers (ugly code, due to no grouping and such); currently we use exclusive layers,
+-- but stacked layers might show up too once we need them; the function based
+-- approach below can be replaced by static (metatable driven) resolvers
+
+viewerlayers = viewerlayers or { }
+viewerlayers.data = viewerlayers.data or { }
+viewerlayers.registered = viewerlayers.registered or { }
+viewerlayers.values = viewerlayers.values or { }
+viewerlayers.listwise = viewerlayers.listwise or { }
+viewerlayers.attribute = attributes.private("viewerlayer")
+
+storage.register("viewerlayers/registered", viewerlayers.registered, "viewerlayers.registered")
+storage.register("viewerlayers/values", viewerlayers.values, "viewerlayers.values")
+
+local data = viewerlayers.data
+local values = viewerlayers.values
+local listwise = viewerlayers.listwise
+local registered = viewerlayers.registered
+local template = "%s"
+
+-- stacked
+
+local function extender(viewerlayers,key)
+ if key == "none" then
+ local d = nodeinjections.stoplayer()
+ viewerlayers.none = d
+ return d
+ end
+end
+
+local function reviver(data,n)
+ local d = nodeinjections.startlayer(values[n])
+ data[n] = d
+ return d
+end
+
+setmetatable(viewerlayers, { __index = extender })
+setmetatable(viewerlayers.data, { __index = reviver })
+
+local function initializer(...)
+ return states.initialize(...)
+end
+
+viewerlayers.register = function(name,lw) -- if not inimode redefine data[n] in first call
+ local stamp = format(template,name)
+ local n = registered[stamp]
+ if not n then
+ n = #values + 1
+ values[n] = name
+ registered[stamp] = n
+ listwise[n] = lw or false
+ end
+ return registered[stamp] -- == n
+end
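+
+-- Example usage (illustration only, the layer name is made up): the returned
+-- index is what the viewerlayer attribute gets set to; the second argument
+-- marks the layer as listwise:
+--
+--~ local n = viewerlayers.register("background",true)
+--~ -- values[n] --> "background" and listwise[n] --> true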
+
+shipouts.handle_viewerlayer = nodes.install_attribute_handler {
+ name = "viewerlayer",
+ namespace = viewerlayers,
+ initializer = initializer,
+ finalizer = states.finalize,
+ processor = states.stacked,
+}
+
+function viewerlayers.enable()
+ tasks.enableaction("shipouts","shipouts.handle_viewerlayer")
+end
diff --git a/tex/context/base/attr-div.mkiv b/tex/context/base/attr-div.mkiv
new file mode 100644
index 000000000..dea223da0
--- /dev/null
+++ b/tex/context/base/attr-div.mkiv
@@ -0,0 +1,136 @@
+%D \module
+%D [ file=attr-div,
+%D version=2007.06.06,
+%D title=\CONTEXT\ Attribute Macros,
+%D subtitle=Diverse,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright=PRAGMA-ADE]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Attribute Macros / Diverse}
+
+%D This code will move.
+
+\unprotect
+
+\registerctxluafile{attr-div}{1.001}
+
+% \definesystemattribute[ignore]
+%
+% \edef\startignorecontent{\dosetattribute{ignore}\plusone}
+% \edef\stopignorecontent {\doresetattribute{ignore}}
+
+% todo: no need for 'color' argument, we can set that once at startup; currently
+% a bit inconsistent
+
+% 1=off 2=gray 3=spot 4=rgb 5=cmyk 6=cmy % only 1/2/4/5 are supported
+%
+% We could combine this in one attribute but this is not faster and also
+% less flexible because sometimes we want to freeze the attribute bit.
+%
+% Watch out: real color support will be implemented later.
+
+\newcount\currentcolormodel
+
+\def\dosetcolormodel#1%
+ {\currentcolormodel\ctxlua{tex.print(colors.setmodel('#1'))}%
+ \attribute\colormodelattribute\currentcolormodel}
+
+\dosetcolormodel{all}
+
+\appendtoks
+ \dosetcolormodel{all}% redundant?
+\to \everyjob
+
+\def\registerrgbcolor #1#2#3#4{\ctxlua{colors.register('#1','rgb' ,#2,#3,#4)}}
+\def\registercmykcolor#1#2#3#4#5{\ctxlua{colors.register('#1','cmyk',#2,#3,#4,#5)}}
+\def\registergraycolor #1#2{\ctxlua{colors.register('#1','gray',#2)}}
+
+% transparency
+
+\def\registertransparency#1#2#3%
+ {\setevalue{(ts:#1)}{\attribute\transparencyattribute\ctxlua{tex.write(transparencies.register(#2,#3))} }}
+
+\def\sometransparencyswitch#1{\csname(ts:#1)\endcsname}
+
+\def\sometransparencyswitch
+ {\ctxlua{transparencies.enable()}%
+ \gdef\sometransparencyswitch##1{\csname(ts:##1)\endcsname}%
+ \sometransparencyswitch}
+
+% \registertransparency {one} {1} {.5}
+% \registertransparency {two} {1} {.6}
+
+% overprint
+
+\def\registercolorintent#1#2%
+ {\setevalue{(os:#1)}{\attribute\colorintentattribute\ctxlua{tex.write(colorintents.register('#2'))} }}
+
+\def\dotriggercolorintent
+ {\ctxlua{colorintents.enable()}%
+ \gdef\dotriggercolorintent##1{\csname(os:##1)\endcsname}%
+ \dotriggercolorintent}
+
+\registercolorintent{knockout} {knockout}
+\registercolorintent{overprint}{overprint}
+
+\installattributestack\colorintentattribute
+
+\setevalue{(os:\v!none)}{\attribute\colorintentattribute\attributeunsetvalue} % does this work out ok?
+
+% negative
+
+\def\registernegative#1#2%
+ {\setevalue{(ns:#1)}{\attribute\negativeattribute\ctxlua{tex.write(negatives.register('#2'))} }}
+
+\def\dotriggernegative
+ {\ctxlua{negatives.enable()}%
+ \gdef\dotriggernegative##1{\csname(ns:##1)\endcsname}%
+ \dotriggernegative}
+
+\registernegative{positive}{positive}
+\registernegative{negative}{negative}
+
+% effect
+
+\def\registereffect#1#2#3% #2=stretch #3=rulethickness
+ {\setxvalue{(es:#1:#2:\number\dimexpr#3\relax)}%
+ {\attribute\effectattribute\ctxlua{tex.write(effects.register('#1',#2,\number\dimexpr#3\relax))} }}
+
+\def\dotriggereffect
+ {\ctxlua{effects.enable()}%
+ \gdef\dotriggereffect##1##2##3%
+ {\ifcsname(es:##1:##2:\number\dimexpr##3\relax)\endcsname\else\registereffect{##1}{##2}{##3}\fi
+ \csname(es:##1:##2:\number\dimexpr##3\relax)\endcsname}%
+ \dotriggereffect}
+
+% \registereffect{normal}
+% \registereffect{inner}
+% \registereffect{outer}
+% \registereffect{both}
+% \registereffect{hidden}
+
+% viewerlayers (will probably change a bit)
+
+% needs to work over stopitemize grouping etc
+
+\def\registerviewerlayer#1#2% global !
+ {\setxvalue{(vl:#1)}{\global\attribute\viewerlayerattribute\ctxlua{tex.write(viewerlayers.register('#2'))} }}
+
+\setevalue{(vl:)}{\global\attribute\viewerlayerattribute\attributeunsetvalue}
+
+\def\dotriggerviewerlayer
+ {\ctxlua{viewerlayers.enable()}%
+ \gdef\dotriggerviewerlayer##1{\csname(vl:##1)\endcsname}%
+ \dotriggerviewerlayer}
+
+\protect \endinput
+
+% test case
+%
+% {\green \hbox to \hsize{\leaders\hrule \hfill a}\par}
+% {\red \hbox to \hsize{\leaders\hbox{x}\hfill a}\par}
diff --git a/tex/context/base/attr-ini.lua b/tex/context/base/attr-ini.lua
index 81c2f4744..27d7fdd90 100644
--- a/tex/context/base/attr-ini.lua
+++ b/tex/context/base/attr-ini.lua
@@ -6,643 +6,64 @@ if not modules then modules = { } end modules ['attr-ini'] = {
license = "see context related readme files"
}
--- this module is being reconstructed
--- we can also do the nsnone via a metatable and then also se index 0
+local next, type = next, type
-local type = type
-local format, gmatch = string.format, string.gmatch
-local concat = table.concat
-local texsprint = tex.sprint
+--[[ldx--
+<p>We start with a registration system for attributes so that we can use the
+symbolic names later on.</p>
+--ldx]]--
-local ctxcatcodes = tex.ctxcatcodes
-local unsetvalue = attributes.unsetvalue
+attributes = attributes or { }
--- todo: document this but first reimplement this as it reflects the early
--- days of luatex / mkiv and we have better ways now
+attributes.names = attributes.names or { }
+attributes.numbers = attributes.numbers or { }
+attributes.list = attributes.list or { }
+attributes.unsetvalue = -0x7FFFFFFF
--- nb: attributes: color etc is much slower than normal (marks + literals) but ...
--- nb. too many "0 g"s
+storage.register("attributes/names", attributes.names, "attributes.names")
+storage.register("attributes/numbers", attributes.numbers, "attributes.numbers")
+storage.register("attributes/list", attributes.list, "attributes.list")
-nodes = nodes or { }
-states = states or { }
-shipouts = shipouts or { }
+local names, numbers, list = attributes.names, attributes.numbers, attributes.list
--- We can distinguish between rules and glyphs but it's not worth the trouble. A
--- first implementation did that and while it saves a bit for glyphs and rules, it
--- costs more resourses for transparencies. So why bother.
-
---
--- colors
---
-
--- we can also collapse the two attributes: n, n+1, n+2 and then
--- at the tex end add 0, 1, 2, but this is not faster and less
--- flexible (since sometimes we freeze color attribute values at
--- the lua end of the game
---
--- we also need to store the colorvalues because we need then in mp
---
--- This is a compromis between speed and simplicity. We used to store the
--- values and data in one array, which made in neccessary to store the
--- converters that need node constructor into strings and evaluate them
--- at runtime (after reading from storage). Think of:
---
--- colors.strings = colors.strings or { }
---
--- if environment.initex then
--- colors.strings[color] = "return colors." .. colorspace .. "(" .. concat({...},",") .. ")"
--- end
---
--- storage.register("colors/data", colors.strings, "colors.data") -- evaluated
---
--- We assume that only processcolors are defined in the format.
-
-colors = colors or { }
-colors.data = colors.data or { }
-colors.values = colors.values or { }
-colors.registered = colors.registered or { }
-
-colors.weightgray = true
-colors.attribute = attributes.private('color')
-colors.selector = attributes.private('colormodel')
-colors.default = 1
-colors.main = nil
-colors.triggering = true
-
-storage.register("colors/values", colors.values, "colors.values")
-storage.register("colors/registered", colors.registered, "colors.registered")
-
-local templates = {
- rgb = "r:%s:%s:%s",
- cmyk = "c:%s:%s:%s:%s",
- gray = "s:%s",
- spot = "p:%s:%s:%s:%s"
-}
-
-local models = {
- [interfaces.variables.none] = unsetvalue,
- black = unsetvalue,
- bw = unsetvalue,
- all = 1,
- gray = 2,
- rgb = 3,
- cmyk = 4,
-}
-
-colors.model = "all"
-
-local data = colors.data
-local values = colors.values
-local registered = colors.registered
-
-local numbers = attributes.numbers
-local list = attributes.list
-
-local min, max, floor = math.min, math.max, math.floor
-
-local nodeinjections = backends.nodeinjections
-local codeinjections = backends.codeinjections
-local registrations = backends.registrations
-
-local function rgbtocmyk(r,g,b) -- we could reduce
- return 1-r, 1-g, 1-b, 0
-end
-
-local function cmyktorgb(c,m,y,k)
- return 1.0 - min(1.0,c+k), 1.0 - min(1.0,m+k), 1.0 - min(1.0,y+k)
-end
-
-local function rgbtogray(r,g,b)
- if colors.weightgray then
- return .30*r+.59*g+.11*b
- else
- return r/3+g/3+b/3
- end
-end
-
-local function cmyktogray(c,m,y,k)
- return rgbtogray(cmyktorgb(c,m,y,k))
-end
-
--- http://en.wikipedia.org/wiki/HSI_color_space
--- http://nl.wikipedia.org/wiki/HSV_(kleurruimte)
-
-
-local function hsvtorgb(h,s,v)
- -- h = h % 360
- local hd = h/60
- local hf = floor(hd)
- local hi = hf % 6
- -- local f = hd - hi
- local f = hd - hf
- local p = v * (1 - s)
- local q = v * (1 - f * s)
- local t = v * (1 - (1 - f) * s)
- if hi == 0 then
- return v, t, p
- elseif hi == 1 then
- return q, v, p
- elseif hi == 2 then
- return p, v, t
- elseif hi == 3 then
- return p, q, v
- elseif hi == 4 then
- return t, p, v
- elseif hi == 5 then
- return v, p, q
- else
- print("error in hsv -> rgb",hi,h,s,v)
- end
-end
-
-function rgbtohsv(r,g,b)
- local offset, maximum, other_1, other_2
- if r >= g and r >= b then
- offset, maximum, other_1, other_2 = 0, r, g, b
- elseif g >= r and g >= b then
- offset, maximum, other_1, other_2 = 2, g, b, r
- else
- offset, maximum, other_1, other_2 = 4, b, r, g
- end
- if maximum == 0 then
- return 0, 0, 0
- end
- local minimum = other_1 < other_2 and other_1 or other_2
- if maximum == minimum then
- return 0, 0, maximum
- end
- local delta = maximum - minimum
- return (offset + (other_1-other_2)/delta)*60, delta/maximum, maximum
-end
-
-function graytorgb(s) -- unweighted
- return 1-s, 1-s, 1-s
-end
-
-function hsvtogray(h,s,v)
- return rgb_to_gray(hsv_to_rgb(h,s,v))
-end
-
-function grayto_hsv(s)
- return 0, 0, s
-end
-
-colors.rgbtocmyk = rgbtocmyk
-colors.rgbtogray = rgbtogray
-colors.cmyktorgb = cmyktorgb
-colors.cmyktogray = cmyktogray
-colors.rgbtohsv = rgbtohsv
-colors.hsvtorgb = hsvtorgb
-colors.hsvtogray = hsvtogray
-colors.graytohsv = graytohsv
-
--- we can share some *data by using s, rgb and cmyk hashes, but
--- normally the amount of colors is not that large; storing the
--- components costs a bit of extra runtime, but we expect to gain
--- some back because we have them at hand; the number indicates the
--- default color space
-
-function colors.gray(s)
- return { 2, s, s, s, s, 0, 0, 0, 1-s }
-end
-
-function colors.rgb(r,g,b)
- local s = rgbtogray(r,g,b)
- local c, m, y, k = rgbtocmyk(r,g,b)
- return { 3, s, r, g, b, c, m, y, k }
-end
-
-function colors.cmyk(c,m,y,k)
- local s = cmyktogray(c,m,y,k)
- local r, g, b = cmyktorgb(c,m,y,k)
- return { 4, s, r, g, b, c, m, y, k }
-end
-
---~ function colors.spot(parent,f,d,p)
---~ return { 5, .5, .5, .5, .5, 0, 0, 0, .5, parent, f, d, p }
---~ end
-
-function colors.spot(parent,f,d,p)
- if type(p) == "number" then
- local n = list[numbers.color][parent] -- hard coded ref to color number
- if n then
- local v = values[n]
- if v then
- -- the via cmyk hack is dirty, but it scales better
- local c, m, y, k = p*v[6], p*v[7], p*v[8], p*v[8]
- local r, g, b = cmyktorgb(c,m,y,k)
- local s = cmyktogray(c,m,y,k)
- return { 5, s, r, g, b, c, m, y, k, parent, f, d, p }
- end
- end
- else
- -- todo, multitone (maybe p should be a table)
+function attributes.define(name,number) -- at the tex end
+ if not numbers[name] then
+ numbers[name], names[number], list[number] = number, name, { }
end
- return { 5, .5, .5, .5, .5, 0, 0, 0, .5, parent, f, d, p }
end
-local function graycolor(...) graycolor = nodeinjections.graycolor return graycolor(...) end
-local function rgbcolor (...) rgbcolor = nodeinjections.rgbcolor return rgbcolor (...) end
-local function cmykcolor(...) cmykcolor = nodeinjections.cmykcolor return cmykcolor(...) end
-local function spotcolor(...) spotcolor = nodeinjections.spotcolor return spotcolor(...) end
+--[[ldx--
+<p>We can use the attributes in the range 127-255 (outside user space). These
+are only used when no attribute is set at the \TEX\ end, which normally
+happens in <l n='context'/>.</p>
+--ldx]]--
-local function extender(colors,key)
- if key == "none" then
- local d = graycolor(0)
- colors.none = d
- return d
- end
-end
+storage.shared.attributes_last_private = storage.shared.attributes_last_private or 127
-local function reviver(data,n)
- local v = values[n]
- local d
- if not v then
- local gray = graycolor(0)
- d = { gray, gray, gray, gray }
- logs.report("attributes","unable to revive color %s",n or "?")
- else
- local kind = v[1]
- if kind == 2 then
- local gray= graycolor(v[2])
- d = { gray, gray, gray, gray }
- elseif kind == 3 then
- local gray, rgb, cmyk = graycolor(v[2]), rgbcolor(v[3],v[4],v[5]), cmykcolor(v[6],v[7],v[8],v[9])
- d = { rgb, gray, rgb, cmyk }
- elseif kind == 4 then
- local gray, rgb, cmyk = graycolor(v[2]), rgbcolor(v[3],v[4],v[5]), cmykcolor(v[6],v[7],v[8],v[9])
- d = { cmyk, gray, rgb, cmyk }
- elseif kind == 5 then
- local spot = spotcolor(v[10],v[11],v[12],v[13])
- -- d = { spot, gray, rgb, cmyk }
- d = { spot, spot, spot, spot }
+function attributes.private(name) -- at the lua end (hidden from user)
+ local number = numbers[name]
+ if not number then
+ local last = storage.shared.attributes_last_private or 127
+ if last < 255 then
+ last = last + 1
+ storage.shared.attributes_last_private = last
end
+ number = last
+ numbers[name], names[number], list[number] = number, name, { }
end
- data[n] = d
- return d
+ return number
end
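+
+-- Example usage (illustration only, "mine" is made up): user attributes get
+-- their number from the tex end via attributes.define, private ones are
+-- allocated here, above 127:
+--
+--~ attributes.define("mine",100)
+--~ local a = attributes.private("color") -- e.g. 128, hidden from the user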
-setmetatable(colors, { __index = extender })
-setmetatable(colors.data, { __index = reviver })
-
-function colors.filter(n)
- return concat(data[n],":",5)
-end
-
-function colors.setmodel(name,weightgray)
- colors.model = name
- colors.default = models[name] or 1
- colors.weightgray = weightgray ~= false
- return colors.default
-end
-
-function colors.register(name, colorspace, ...) -- passing 9 vars is faster (but not called that often)
- local stamp = format(templates[colorspace],...)
- local color = registered[stamp]
- if not color then
- color = #values + 1
- values[color] = colors[colorspace](...)
- registered[stamp] = color
- -- colors.reviver(color)
- end
- if name then
- list[colors.attribute][name] = color -- not grouped, so only global colors
- end
- return registered[stamp]
-end
-
-function colors.value(id)
- return values[id]
-end
+-- new
-shipouts.handle_color = nodes.install_attribute_handler {
- name = "color",
- namespace = colors,
- initializer = states.initialize,
- finalizer = states.finalize,
- processor = states.selective,
- resolver = function() return colors.main end,
-}
-
-function colors.enable()
- tasks.enableaction("shipouts","shipouts.handle_color")
-end
-
--- transparencies
-
-transparencies = transparencies or { }
-transparencies.registered = transparencies.registered or { }
-transparencies.data = transparencies.data or { }
-transparencies.values = transparencies.values or { }
-transparencies.triggering = true
-transparencies.attribute = attributes.private('transparency')
-
-storage.register("transparencies/registered", transparencies.registered, "transparencies.registered")
-storage.register("transparencies/values", transparencies.values, "transparencies.values")
-
-local registered = transparencies.registered -- we could use a 2 dimensional table instead
-local data = transparencies.data
-local values = transparencies.values
-local template = "%s:%s"
-
-local function inject_transparency (...)
- inject_transparency = nodeinjections.transparency
- return inject_transparency(...)
-end
-
-local function register_transparency(...)
- register_transparency = registrations.transparency
- return register_transparency(...)
-end
-
-function transparencies.register(name,a,t,force) -- name is irrelevant here (can even be nil)
- -- Force needed here for metapost converter. We could always force
- -- but then we'd end up with transparencies resources even if we
- -- would not use transparencies (but define them only). This is
- -- somewhat messy.
- local stamp = format(template,a,t)
- local n = registered[stamp]
- if not n then
- n = #values + 1
- values[n] = { a, t }
- registered[stamp] = n
- if force then
- register_transparency(n,a,t)
+function attributes.ofnode(n)
+ local a = n.attr
+ if a then
+ a = a.next
+ while a do
+ local number, value = a.number, a.value
+ texio.write_nl(format("%s : attribute %3i, value %4i, name %s",tostring(n),number,value,names[number] or '?'))
+ a = a.next
end
- elseif force and not data[n] then
- register_transparency(n,a,t)
- end
- return registered[stamp]
-end
-
-local function extender(transparencies,key)
- if key == "none" then
- local d = inject_transparency(0)
- transparencies.none = d
- return d
- end
-end
-
-local function reviver(data,n)
- local v = values[n]
- local d
- if not v then
- d = inject_transparency(0)
- else
- d = inject_transparency(n)
- register_transparency(n,v[1],v[2])
- end
- data[n] = d
- return d
-end
-
-setmetatable(transparencies, { __index = extender })
-setmetatable(transparencies.data, { __index = reviver }) -- register if used
-
--- check if there is an identity
-
-function transparencies.value(id)
- return values[id]
-end
-
-shipouts.handle_transparency = nodes.install_attribute_handler {
- name = "transparency",
- namespace = transparencies,
- initializer = states.initialize,
- finalizer = states.finalize,
- processor = states.process,
-}
-
-function transparencies.enable()
- tasks.enableaction("shipouts","shipouts.handle_transparency")
-end
-
---- colorintents: overprint / knockout
-
-colorintents = colorintents or { }
-colorintents.data = colorintents.data or { }
-colorintents.attribute = attributes.private('colorintent')
-
-colorintents.registered = {
- overprint = 1,
- knockout = 2,
-}
-
-local data, registered = colorintents.data, colorintents.registered
-
-local function extender(colorintents,key)
- if key == "none" then
- local d = data[2]
- colorintents.none = d
- return d
- end
-end
-
-local function reviver(data,n)
- if n == 1 then
- local d = nodeinjections.overprint() -- called once
- data[1] = d
- return d
- elseif n == 2 then
- local d = nodeinjections.knockout() -- called once
- data[2] = d
- return d
- end
-end
-
-setmetatable(colorintents, { __index = extender })
-setmetatable(colorintents.data, { __index = reviver })
-
-function colorintents.register(stamp)
- return registered[stamp] or registered.overprint
-end
-
-shipouts.handle_colorintent = nodes.install_attribute_handler {
- name = "colorintent",
- namespace = colorintents,
- initializer = states.initialize,
- finalizer = states.finalize,
- processor = states.process,
-}
-
-function colorintents.enable()
- tasks.enableaction("shipouts","shipouts.handle_colorintent")
-end
-
---- negative / positive
-
-negatives = negatives or { }
-negatives.data = negatives.data or { }
-negatives.attribute = attributes.private("negative")
-
-negatives.registered = {
- positive = 1,
- negative = 2,
-}
-
-local data, registered = negatives.data, negatives.registered
-
-local function extender(negatives,key)
- if key == "none" then
- local d = data[1]
- negatives.none = d
- return d
- end
-end
-
-local function reviver(data,n)
- if n == 1 then
- local d = nodeinjections.positive() -- called once
- data[1] = d
- return d
- elseif n == 2 then
- local d = nodeinjections.negative() -- called once
- data[2] = d
- return d
- end
-end
-
-setmetatable(negatives, { __index = extender })
-setmetatable(negatives.data, { __index = reviver })
-
-function negatives.register(stamp)
- return registered[stamp] or registered.positive
-end
-
-shipouts.handle_negative = nodes.install_attribute_handler {
- name = "negative",
- namespace = negatives,
- initializer = states.initialize,
- finalizer = states.finalize,
- processor = states.process,
-}
-
-function negatives.enable()
- tasks.enableaction("shipouts","shipouts.handle_negative")
-end
-
--- effects -- can be optimized (todo: metatables)
-
-effects = effects or { }
-effects.data = effects.data or { }
-effects.values = effects.values or { }
-effects.registered = effects.registered or { }
-effects.stamp = "%s:%s:%s"
-effects.attribute = attributes.private("effect")
-
-storage.register("effects/registered", effects.registered, "effects.registered")
-storage.register("effects/values", effects.values, "effects.values")
-
-local data, registered, values = effects.data, effects.registered, effects.values
-
--- valid effects: normal inner outer both hidden (stretch,rulethickness,effect)
-
-local function effect(...) effect = nodeinjections.effect return effect(...) end
-
-local function extender(effects,key)
- if key == "none" then
- local d = effect(0,0,0)
- effects.none = d
- return d
- end
-end
-
-local function reviver(data,n)
- local e = values[n] -- we could nil values[n] now but hardly needed
- local d = effect(e[1],e[2],e[3])
- data[n] = d
- return d
-end
-
-setmetatable(effects, { __index = extender })
-setmetatable(effects.data, { __index = reviver })
-
-function effects.register(effect,stretch,rulethickness)
- local stamp = format(effects.stamp,effect,stretch,rulethickness)
- local n = registered[stamp]
- if not n then
- n = #values + 1
- values[n] = { effect, stretch, rulethickness }
- registered[stamp] = n
- end
- return n
-end
-
-shipouts.handle_effect = nodes.install_attribute_handler {
- name = "effect",
- namespace = effects,
- initializer = states.initialize,
- finalizer = states.finalize,
- processor = states.process,
-}
-
-function effects.enable()
- tasks.enableaction("shipouts","shipouts.handle_effect")
-end
-
--- layers (ugly code, due to no grouping and such); currently we use exclusive layers
--- but when we need it stacked layers might show up too; the next function based
--- approach can be replaced by static (metatable driven) resolvers
-
-viewerlayers = viewerlayers or { }
-viewerlayers.data = viewerlayers.data or { }
-viewerlayers.registered = viewerlayers.registered or { }
-viewerlayers.values = viewerlayers.values or { }
-viewerlayers.listwise = viewerlayers.listwise or { }
-viewerlayers.attribute = attributes.private("viewerlayer")
-
-storage.register("viewerlayers/registered", viewerlayers.registered, "viewerlayers.registered")
-storage.register("viewerlayers/values", viewerlayers.values, "viewerlayers.values")
-
-local data = viewerlayers.data
-local values = viewerlayers.values
-local listwise = viewerlayers.listwise
-local registered = viewerlayers.registered
-local template = "%s"
-
--- stacked
-
-local function extender(viewerlayers,key)
- if key == "none" then
- local d = nodeinjections.stoplayer()
- viewerlayers.none = d
- return d
- end
-end
-
-local function reviver(data,n)
- local d = nodeinjections.startlayer(values[n])
- data[n] = d
- return d
-end
-
-setmetatable(viewerlayers, { __index = extender })
-setmetatable(viewerlayers.data, { __index = reviver })
-
-local function initializer(...)
- return states.initialize(...)
-end
-
-viewerlayers.register = function(name,lw) -- if not inimode redefine data[n] in first call
- local stamp = format(template,name)
- local n = registered[stamp]
- if not n then
- n = #values + 1
- values[n] = name
- registered[stamp] = n
- listwise[n] = lw or false
- end
- return registered[stamp] -- == n
-end
-
-shipouts.handle_viewerlayer = nodes.install_attribute_handler {
- name = "viewerlayer",
- namespace = viewerlayers,
- initializer = initializer,
- finalizer = states.finalize,
- processor = states.stacked,
-}
-
-function viewerlayers.enable()
- tasks.enableaction("shipouts","shipouts.handle_viewerlayer")
+ end
end
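+
+-- Example usage (illustration only): dump the attributes of the nodes in a
+-- box to the log, using the symbolic names registered above (assuming box
+-- 255 is not void):
+--
+--~ for n in node.traverse(tex.box[255].list) do
+--~     attributes.ofnode(n)
+--~ end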
diff --git a/tex/context/base/attr-ini.mkiv b/tex/context/base/attr-ini.mkiv
index 87d06c48a..15ace0145 100644
--- a/tex/context/base/attr-ini.mkiv
+++ b/tex/context/base/attr-ini.mkiv
@@ -20,8 +20,6 @@
\registerctxluafile{attr-ini}{1.001}
-%D This might move:
-
\def\pushattribute#1%
{\global\advance\csname\??ae:\string#1\endcsname\plusone
\global\expandafter\mathchardef\csname\??ae:\string#1:\number\csname\??ae:\string#1\endcsname\endcsname\attribute#1}
@@ -33,6 +31,41 @@
\def\installattributestack#1%
{\expandafter\newcount\csname\??ae:\string#1\endcsname}
+\newtoks \attributesresetlist
+
+\ifdefined \v!global \else \def\v!global{global} \fi % for metatex
+
+\unexpanded\def\defineattribute
+ {\dodoubleempty\dodefineattribute}
+
+\def\dodefineattribute[#1][#2]% alternatively we can let lua do the housekeeping
+ {\expandafter\newattribute\csname @attr@#1\endcsname
+ \expandafter \xdef\csname :attr:#1\endcsname{\number\lastallocatedattribute}%
+ \ctxlua{attributes.define("#1",\number\lastallocatedattribute)}%
+ %\writestatus\m!systems{defining attribute #1 with number \number\lastallocatedattribute}%
+ \doifnotinset\v!global{#2}{\appendetoks\csname @attr@#1\endcsname\attributeunsetvalue\to\attributesresetlist}}
+
+\unexpanded\def\definesystemattribute
+ {\dodoubleempty\dodefinesystemattribute}
+
+\def\dodefinesystemattribute[#1][#2]% alternatively we can let lua do the housekeeping
+ {\scratchcounter\ctxlua{tex.print(attributes.private("#1"))}\relax
+ \global\expandafter\attributedef\csname @attr@#1\endcsname\scratchcounter
+ \expandafter \xdef\csname :attr:#1\endcsname{\number\scratchcounter}%
+ %\writestatus\m!systems{defining system attribute #1 with number \number\scratchcounter}%
+ \doifnotinset\v!global{#2}{\appendetoks\csname @attr@#1\endcsname\attributeunsetvalue\to\attributesresetlist}}
+
+% expandable so we can \edef them for speed
+
+\def\dosetattribute#1#2{\csname @attr@#1\endcsname#2\relax}
+\def\doresetattribute#1{\csname @attr@#1\endcsname\attributeunsetvalue}
+\def\dogetattribute #1{\number\csname @attr@#1\endcsname}
+\def\dogetattributeid#1{\csname :attr:#1\endcsname}
+
+\let\dompattribute\gobbletwoarguments
+
+\def\resetallattributes{\the\attributesresetlist}
+
%D For the moment we put this here (later it will move to where it's used):
\definesystemattribute[state]
@@ -53,118 +86,4 @@
\definesystemattribute[ruled] \chardef\ruledattribute \dogetattributeid{ruled}
\definesystemattribute[shifted] \chardef\shiftedattribute \dogetattributeid{shifted}
-% \definesystemattribute[ignore]
-%
-% \edef\startignorecontent{\dosetattribute{ignore}\plusone}
-% \edef\stopignorecontent {\doresetattribute{ignore}}
-
-% todo: no need for 'color' argument, we can set that once at startup; currently
-% a bit inconsistent
-
-% 1=off 2=gray 3=spot 4=rgb 5=cmyk 6=cmy % only 1/2/4/5 are supported
-%
-% We could combine this in one attribute but this is not faster and also
-% less flexible because sometimes we want to freeze the attribute bit.
-%
-% Watch out: real color support will be implemented later.
-
-\newcount\currentcolormodel
-
-\def\dosetcolormodel#1%
- {\currentcolormodel\ctxlua{tex.print(colors.setmodel('#1'))}%
- \attribute\colormodelattribute\currentcolormodel}
-
-\dosetcolormodel{all}
-
-\appendtoks
- \dosetcolormodel{all}% redundant?
-\to \everyjob
-
-\def\registerrgbcolor #1#2#3#4{\ctxlua{colors.register('#1','rgb' ,#2,#3,#4)}}
-\def\registercmykcolor#1#2#3#4#5{\ctxlua{colors.register('#1','cmyk',#2,#3,#4,#5)}}
-\def\registergraycolor #1#2{\ctxlua{colors.register('#1','gray',#2)}}
-
-% transparency
-
-\def\registertransparency#1#2#3%
- {\setevalue{(ts:#1)}{\attribute\transparencyattribute\ctxlua{tex.write(transparencies.register(#2,#3))} }}
-
-\def\sometransparencyswitch#1{\csname(ts:#1)\endcsname}
-
-\def\sometransparencyswitch
- {\ctxlua{transparencies.enable()}%
- \gdef\sometransparencyswitch##1{\csname(ts:##1)\endcsname}%
- \sometransparencyswitch}
-
-% \registertransparency {one} {1} {.5}
-% \registertransparency {two} {1} {.6}
-
-% overprint
-
-\def\registercolorintent#1#2%
- {\setevalue{(os:#1)}{\attribute\colorintentattribute\ctxlua{tex.write(colorintents.register('#2'))} }}
-
-\def\dotriggercolorintent
- {\ctxlua{colorintents.enable()}%
- \gdef\dotriggercolorintent##1{\csname(os:##1)\endcsname}%
- \dotriggercolorintent}
-
-\registercolorintent{knockout} {knockout}
-\registercolorintent{overprint}{overprint}
-
-\installattributestack\colorintentattribute
-
-\setevalue{(os:#\v!none}{\attribute\colorintentattribute\attributeunsetvalue} % does this work out ok?
-
-% negative
-
-\def\registernegative#1#2%
- {\setevalue{(ns:#1)}{\attribute\negativeattribute\ctxlua{tex.write(negatives.register('#2'))} }}
-
-\def\dotriggernegative
- {\ctxlua{negatives.enable()}%
- \gdef\dotriggernegative##1{\csname(ns:##1)\endcsname}%
- \dotriggernegative}
-
-\registernegative{positive}{positive}
-\registernegative{negative}{negative}
-
-% effect
-
-\def\registereffect#1#2#3% #2=stretch #3=rulethickness
- {\setxvalue{(es:#1:#2:\number\dimexpr#3\relax)}%
- {\attribute\effectattribute\ctxlua{tex.write(effects.register('#1',#2,\number\dimexpr#3\relax))} }}
-
-\def\dotriggereffect
- {\ctxlua{effects.enable()}%
- \gdef\dotriggereffect##1##2##3%
- {\ifcsname(es:##1:##2:\number\dimexpr##3\relax)\endcsname\else\registereffect{##1}{##2}{##3}\fi
- \csname(es:##1:##2:\number\dimexpr##3\relax)\endcsname}%
- \dotriggereffect}
-
-% \registereffect{normal}
-% \registereffect{inner}
-% \registereffect{outer}
-% \registereffect{both}
-% \registereffect{hidden}
-
-% viewerlayers (will probably change a bit)
-
-% needs to work over stopitemize grouping etc
-
-\def\registerviewerlayer#1#2% global !
- {\setxvalue{(vl:#1)}{\global\attribute\viewerlayerattribute\ctxlua{tex.write(viewerlayers.register('#2'))} }}
-
-\setevalue{(vl:)}{\global\attribute\viewerlayerattribute\attributeunsetvalue}
-
-\def\dotriggerviewerlayer
- {\ctxlua{viewerlayers.enable()}%
- \gdef\dotriggerviewerlayer##1{\csname(vl:##1)\endcsname}%
- \dotriggerviewerlayer}
-
\protect \endinput
-
-% test case
-%
-% {\green \hbox to \hsize{\leaders\hrule \hfill a}\par}
-% {\red \hbox to \hsize{\leaders\hbox{x}\hfill a}\par}
diff --git a/tex/context/base/back-ini.lua b/tex/context/base/back-ini.lua
index 12a487dd4..243e3fbd5 100644
--- a/tex/context/base/back-ini.lua
+++ b/tex/context/base/back-ini.lua
@@ -8,9 +8,9 @@ if not modules then modules = { } end modules ['back-ini'] = {
backends = backends or { }
-local trace_backend = false
+local trace_backend = false local function nothing() return nil end
-local function nothing() return nil end
+local report_backends = logs.new("backends")
backends.nothing = nothing
@@ -107,7 +107,7 @@ function backends.install(what)
local backend = backends[what]
if backend then
if trace_backend then
- logs.report("backend", "initializing backend %s (%s)",what,backend.comment or "no comment")
+ report_backends("initializing backend %s (%s)",what,backend.comment or "no comment")
end
backends.current = what
for _, category in next, { "nodeinjections", "codeinjections", "registrations"} do
@@ -117,18 +117,18 @@ function backends.install(what)
for name, meaning in next, whereto do
if plugin[name] then
whereto[name] = plugin[name]
- -- logs.report("backend", "installing function %s in category %s of %s",name,category,what)
+ -- report_backends("installing function %s in category %s of %s",name,category,what)
elseif trace_backend then
- logs.report("backend", "no function %s in category %s of %s",name,category,what)
+ report_backends("no function %s in category %s of %s",name,category,what)
end
end
elseif trace_backend then
- logs.report("backend", "no category %s in %s",category,what)
+ report_backends("no category %s in %s",category,what)
end
end
backends.helpers = backend.helpers
elseif trace_backend then
- logs.report("backend", "no backend named %s",what)
+ report_backends("no backend named %s",what)
end
end
end
diff --git a/tex/context/base/back-pdf.lua b/tex/context/base/back-pdf.lua
index 54e22f1a2..323d23a65 100644
--- a/tex/context/base/back-pdf.lua
+++ b/tex/context/base/back-pdf.lua
@@ -237,7 +237,7 @@ local function registersomespotcolor(name,noffractions,names,p,colorspace,range,
end
end
-function registersomeindexcolor(name,noffractions,names,p,colorspace,range,funct)
+local function registersomeindexcolor(name,noffractions,names,p,colorspace,range,funct)
noffractions = tonumber(noffractions) or 1 -- to be checked
local cnames = pdfarray()
local domain = pdfarray()
@@ -289,13 +289,11 @@ end
local function delayindexcolor(name,names,func)
local hash = (names ~= "" and names) or name
- -- logs.report("index colors","delaying '%s'",name)
delayedindexcolors[hash] = func
end
local function indexcolorref(name) -- actually, names (parent) is the hash
if not indexcolorhash[name] then
- -- logs.report("index colors","registering '%s'",name)
local delayedindexcolor = delayedindexcolors[name]
if type(delayedindexcolor) == "function" then
indexcolorhash[name] = delayedindexcolor()
diff --git a/tex/context/base/bibl-bib.lua b/tex/context/base/bibl-bib.lua
index 3c0dad2fa..a74511427 100644
--- a/tex/context/base/bibl-bib.lua
+++ b/tex/context/base/bibl-bib.lua
@@ -25,6 +25,8 @@ local xmlfilter, xmltext = xml.filter, xml.text
local trace_bibxml = false trackers.register("publications.bibxml", function(v) trace_bibtex = v end)
+local report_publications = logs.new("publications")
+
bibtex = bibtex or { }
bibtex.size = 0
@@ -139,9 +141,9 @@ function bibtex.load(session,filename)
if filename ~= "" then
local data = io.loaddata(filename) or ""
if data == "" then
- logs.report("publications","empty file '%s', no conversion to xml",filename)
+ report_publications("empty file '%s', no conversion to xml",filename)
elseif trace_bibxml then
- logs.report("publications","converting file '%s' to xml",filename)
+ report_publications("converting file '%s' to xml",filename)
end
bibtex.convert(session,data)
end
diff --git a/tex/context/base/bibl-tra.lua b/tex/context/base/bibl-tra.lua
index 442231028..f9a16f699 100644
--- a/tex/context/base/bibl-tra.lua
+++ b/tex/context/base/bibl-tra.lua
@@ -15,6 +15,8 @@ local variables, constants = interfaces.variables, interfaces.constants
local trace_bibtex = false trackers.register("publications.bibtex", function(v) trace_bibtex = v end)
+local report_publications = logs.new("publications")
+
local hacks = bibtex.hacks
local list, done, alldone, used, registered, ordered = { }, { }, { }, { }, { }, { }
@@ -34,7 +36,7 @@ function hacks.process(settings)
interfaces.showmessage("publications",3)
io.savedata(file.addsuffix(jobname,"aux"),format(template,style,database))
if trace_bibtex then
- logs.report("publications","processing bibtex file '%s'",jobname)
+ report_publications("processing bibtex file '%s'",jobname)
end
os.execute(format("bibtex %s",jobname))
-- purge 'm
@@ -43,7 +45,7 @@ end
function hacks.register(str)
if trace_bibtex then
- logs.report("publications","registering bibtex entry '%s'",str)
+ report_publications("registering bibtex entry '%s'",str)
end
registered[#registered+1] = str
ordered[str] = #registered
@@ -73,13 +75,13 @@ function hacks.add(str,listindex)
end
end
-local function compare(a,b)
- local aa, bb = a[1], b[1]
+local function compare(a,b) -- quite some checking for non-nil
+ local aa, bb = a and a[1], b and b[1]
if aa and bb then
- return ordered[aa] < ordered[bb]
- else
- return true
+ local oa, ob = ordered[aa], ordered[bb]
+ return oa and ob and oa < ob
end
+ return false
end
function hacks.flush(sortvariant)
@@ -106,7 +108,8 @@ end
-- we look forward
local function compare(a,b)
- return a[3] < b[3]
+ local aa, bb = a and a[3], b and b[3]
+ return aa and bb and aa < bb
end
function hacks.resolve(prefix,block,reference) -- maybe already feed it split
diff --git a/tex/context/base/blob-ini.lua b/tex/context/base/blob-ini.lua
index 0f7ccee26..da67df0d7 100644
--- a/tex/context/base/blob-ini.lua
+++ b/tex/context/base/blob-ini.lua
@@ -24,6 +24,8 @@ if not modules then modules = { } end modules ['blob-ini'] = {
local type = type
+local report_blobs = logs.new("blobs")
+
local utfvalues = string.utfvalues
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
@@ -147,7 +149,7 @@ function blobs.pack(t,how)
if how == "vertical" then
-- we need to prepend a local par node
-- list[i].pack = node.vpack(list[i].head,"exactly")
- logs.report("blobs","vpack not yet supported")
+ report_blobs("vpack not yet supported")
else
list[i].pack = hpack_node_list(list[i].head,"exactly")
end
diff --git a/tex/context/base/buff-ini.lua b/tex/context/base/buff-ini.lua
index 6b1af8f96..b3ab0c2a7 100644
--- a/tex/context/base/buff-ini.lua
+++ b/tex/context/base/buff-ini.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['buff-ini'] = {
license = "see context related readme files"
}
+-- todo: deal with jobname here, or actually, "" is valid as well
+
-- ctx lua reference model / hooks and such
-- to be optimized
@@ -23,6 +25,8 @@ buffers.visualizers = { }
local trace_run = false trackers.register("buffers.run", function(v) trace_run = v end)
local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end)
+local report_buffers = logs.new("buffers")
+
local utf = unicode.utf8
local concat, texsprint, texprint, texwrite = table.concat, tex.sprint, tex.print, tex.write
@@ -337,6 +341,15 @@ end
buffers.content = content
+function buffers.evaluate(name)
+ local ok = loadstring(content(name))
+ if ok then
+ ok()
+ else
+ report_buffers("invalid lua code in buffer '%s'",name)
+ end
+end
+
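+-- Example usage (illustration only, "whatever" is a made up buffer name),
+-- matching the \ctxluabuffer interface added in buff-ini.mkiv: the content
+-- of the buffer is compiled and run as lua code.
+--
+--~ buffers.evaluate("whatever")
+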
function buffers.collect(names,separator) -- no print
-- maybe we should always store a buffer as table so
-- that we can pass it directly
@@ -420,10 +433,10 @@ function buffers.loadvisualizer(name)
hn = handlers[visualizers.defaultname]
handlers[name] = hn
if trace_visualize then
- logs.report("buffers","mapping '%s' visualizer onto '%s'",name,visualizers.defaultname)
+ report_buffers("mapping '%s' visualizer onto '%s'",name,visualizers.defaultname)
end
elseif trace_visualize then
- logs.report("buffers","loading '%s' visualizer",name)
+ report_buffers("loading '%s' visualizer",name)
end
return hn
end
@@ -444,13 +457,13 @@ function buffers.setvisualizer(str)
currenthandler = handlers[currentvisualizer]
if currenthandler then
-- if trace_visualize then
- -- logs.report("buffers","enabling specific '%s' visualizer",currentvisualizer)
+ -- report_buffers("enabling specific '%s' visualizer",currentvisualizer)
-- end
else
currentvisualizer = visualizers.defaultname
currenthandler = handlers.default
-- if trace_visualize then
- -- logs.report("buffers","enabling default visualizer '%s'",currentvisualizer)
+ -- report_buffers("enabling default visualizer '%s'",currentvisualizer)
-- end
end
if currenthandler.reset then
@@ -764,7 +777,7 @@ function buffers.set_escape(name,pair)
if pair == variables.no then
visualizer.flush_line = visualizer.normal_flush_line or visualizer.flush_line
if trace_visualize then
- logs.report("buffers","resetting escape range for visualizer '%s'",name)
+ report_buffers("resetting escape range for visualizer '%s'",name)
end
else
local start, stop
@@ -785,10 +798,10 @@ function buffers.set_escape(name,pair)
flush_escaped_line(str,pattern,visualizer.normal_flush_line)
end
if trace_visualize then
- logs.report("buffers","setting escape range for visualizer '%s' to %s -> %s",name,start,stop)
+ report_buffers("setting escape range for visualizer '%s' to %s -> %s",name,start,stop)
end
elseif trace_visualize then
- logs.report("buffers","problematic escape specification '%s' for visualizer '%s'",pair,name)
+ report_buffers("problematic escape specification '%s' for visualizer '%s'",pair,name)
end
end
end
diff --git a/tex/context/base/buff-ini.mkiv b/tex/context/base/buff-ini.mkiv
index 86b0fa3c5..13f69554f 100644
--- a/tex/context/base/buff-ini.mkiv
+++ b/tex/context/base/buff-ini.mkiv
@@ -15,6 +15,10 @@
\registerctxluafile{buff-ini}{1.001}
+% todo: move all to lua, also before and after, just context.beforebuffer()
+% todo: commalist to lua end
+% todo: jobname == "" so no need for testing
+
% todo:
%
% \startluacode
@@ -352,4 +356,23 @@
\def\dosavebuffer[#1][#2]{\ctxlua{commands.savebuffer("#1","#2")}}
+%D Experimental: no expansion of commands in buffer!
+
+% \startbuffer[what]
+% tex.print("WHAT")
+% \stopbuffer
+% \startbuffer
+% tex.print("JOBNAME")
+% \stopbuffer
+%
+% \ctxluabuffer[what] \ctxluabuffer
+
+\def\ctxluabuffer
+ {\dosingleempty\doctxluabuffer}
+
+\def\doctxluabuffer[#1]%
+ {\doifelsenothing{#1}
+ {\ctxlua{buffers.evaluate("\jobname")}}
+ {\ctxlua{buffers.evaluate("#1")}}}
+
\protect \endinput
diff --git a/tex/context/base/buff-ver.mkiv b/tex/context/base/buff-ver.mkiv
index dacbdb7ac..dee83f86d 100644
--- a/tex/context/base/buff-ver.mkiv
+++ b/tex/context/base/buff-ver.mkiv
@@ -70,9 +70,6 @@
{\uppercasestring#1\to\ascii
\edef\prettyidentifier{\executeifdefined{\??ty\??ty\ascii}{TEX}}%
\begingroup
- % we can move this to lua
- % \lowercasestring \f!prettyprefix\prettyidentifier\to\filename
- % \doonlyonce\filename{\ctxloadluafile\filename\empty}%
\ctxlua{buffers.loadvisualizer("\ascii")}%
\endgroup}
diff --git a/tex/context/base/catc-act.tex b/tex/context/base/catc-act.tex
index bc24562d7..2cde28e44 100644
--- a/tex/context/base/catc-act.tex
+++ b/tex/context/base/catc-act.tex
@@ -58,4 +58,8 @@
\def\makecharacteractive #1 {\catcode`#1\active}
+\def\installanddefineactivecharacter #1 % #2%
+ {\normalexpanded{\noexpand\installactivecharacter \utfchar{#1} }%
+ \defineactivecharacter #1 }% {#2}}
+
\endinput
diff --git a/tex/context/base/catc-ini.mkiv b/tex/context/base/catc-ini.mkiv
index 269330a1b..fe178b532 100644
--- a/tex/context/base/catc-ini.mkiv
+++ b/tex/context/base/catc-ini.mkiv
@@ -14,7 +14,7 @@
%D We've split the functionality of syst-cat.* over more files
%D now so that we can load more selectively.
-\registerctxluafile{catc-ini} {1.001}
+\registerctxluafile{catc-ini}{1.001}
%D A long standing wish has been the availability of catcode
%D arrays. Because traditional \TEX\ does not provide this we
@@ -55,9 +55,7 @@
{\global\advance\cctdefcounter\plusone
\expandafter\xdef\csname @@ccn:\number\cctdefcounter\endcsname{\string#1}% logging
\global\chardef#1\cctdefcounter
- \ctxlua{catcodes.register("\expandafter\gobbleoneargument\string#1",\number#1)}%
- % we have two ways to access catcodetable numbers
- \startruntimectxluacode tex.\expandafter\gobbleoneargument\string#1 = \number#1 ;\stopruntimectxluacode}
+ \ctxlua{catcodes.register("\expandafter\gobbleoneargument\string#1",\number#1)}}
\newcatcodetable \scratchcatcodes \initcatcodetable\scratchcatcodes
diff --git a/tex/context/base/char-def.lua b/tex/context/base/char-def.lua
index b7abee0fb..99ac18978 100644
--- a/tex/context/base/char-def.lua
+++ b/tex/context/base/char-def.lua
@@ -48942,6 +48942,12 @@ characters.data={
description="DOUBLE VERTICAL LINE",
direction="on",
linebreak="ai",
+ mathspec={
+ { class="delimiter", name="Vert" },
+ { class="nothing", name="Arrowvert" },
+ { class="open", name="lVert" },
+ { class="close", name="rVert" },
+ },
unicodeslot=0x2016,
},
[0x2017]={
@@ -51730,7 +51736,6 @@ characters.data={
description="RIGHTWARDS ARROW FROM BAR",
direction="on",
linebreak="al",
- fallback=[[\mapstochar\rightarrow]],
mathclass="relation",
mathname="mapsto",
unicodeslot=0x21A6,
@@ -51755,7 +51760,6 @@ characters.data={
description="LEFTWARDS ARROW WITH HOOK",
direction="on",
linebreak="al",
- fallback=[[\leftarrow\joinrel\rhook]],
mathclass="relation",
mathname="hookleftarrow",
unicodeslot=0x21A9,
@@ -51765,7 +51769,6 @@ characters.data={
description="RIGHTWARDS ARROW WITH HOOK",
direction="on",
linebreak="al",
- fallback=[[\lhook\joinrel\rightarrow]],
mathclass="relation",
mathname="hookrightarrow",
unicodeslot=0x21AA,
@@ -52873,10 +52876,6 @@ characters.data={
linebreak="ai",
mathspec={
{ class="relation", name="parallel" },
- { class="delimiter", name="Vert" },
- { class="nothing", name="Arrowvert" },
- { class="open", name="lVert" },
- { class="close", name="rVert" },
},
unicodeslot=0x2225,
},
@@ -53383,7 +53382,6 @@ characters.data={
category="sm",
description="ESTIMATES",
direction="on",
- fallback=[[\buildrel\wedge\over=]],
linebreak="al",
unicodeslot=0x2259,
mathclass="relation",
@@ -53837,24 +53835,22 @@ characters.data={
category="sm",
description="NOT A SUBSET OF",
direction="on",
- fallback=[[\not\subset]],
mathclass="relation",
mathname="nsubset",
linebreak="al",
mirror=0x2285,
- specials={ "char", 0x2282, 0x0338 },
+ specials={ "char", 0x0338, 0x2282 },
unicodeslot=0x2284,
},
[0x2285]={
category="sm",
description="NOT A SUPERSET OF",
direction="on",
- fallback=[[\not\supset]],
linebreak="al",
mathclass="relation",
mathname="nsupset",
mirror=0x2284,
- specials={ "char", 0x2283, 0x0338 },
+ specials={ "char", 0x0338, 0x2283 },
unicodeslot=0x2285,
},
[0x2286]={
@@ -54181,7 +54177,6 @@ characters.data={
description="MODELS",
direction="on",
linebreak="al",
- fallback=[[\mathrel|\joinrel=]],
mathclass="relation",
mathname="models",
unicodeslot=0x22A7,
@@ -54478,7 +54473,6 @@ characters.data={
description="BOWTIE",
direction="on",
linebreak="al",
- fallback=[[\mathrel\triangleright\joinrel\mathrel\triangleleft]],
mathspec={
{ class="relation", name="bowtie" },
{ class="relation", name="Join" }, -- AM: Maybe wrong
@@ -63425,7 +63419,6 @@ characters.data={
description="LONG LEFTWARDS ARROW",
direction="on",
linebreak="al",
- fallback=[[\leftarrow\joinrel\relbar]],
mathclass="relation",
mathname="longleftarrow",
unicodeslot=0x27F5,
@@ -63435,7 +63428,6 @@ characters.data={
description="LONG RIGHTWARDS ARROW",
direction="on",
linebreak="al",
- fallback=[[\relbar\joinrel\rightarrow]],
mathclass="relation",
mathname="longrightarrow",
unicodeslot=0x27F6,
@@ -63445,7 +63437,6 @@ characters.data={
description="LONG LEFT RIGHT ARROW",
direction="on",
linebreak="al",
- fallback=[[\leftarrow\joinrel\rightarrow]],
mathclass="relation",
mathname="longleftrightarrow",
unicodeslot=0x27F7,
@@ -63455,7 +63446,6 @@ characters.data={
description="LONG LEFTWARDS DOUBLE ARROW",
direction="on",
linebreak="al",
- fallback=[[\Leftarrow\joinrel\Relbar]],
mathclass="relation",
mathname="Longleftarrow",
unicodeslot=0x27F8,
@@ -63465,7 +63455,6 @@ characters.data={
description="LONG RIGHTWARDS DOUBLE ARROW",
direction="on",
linebreak="al",
- fallback=[[\Relbar\joinrel\Rightarrow]],
mathclass="relation",
mathname="Longrightarrow",
unicodeslot=0x27F9,
@@ -63475,7 +63464,6 @@ characters.data={
description="LONG LEFT RIGHT DOUBLE ARROW",
direction="on",
linebreak="al",
- fallback=[[\Leftarrow\joinrel\Rightarrow]],
mathclass="relation",
mathname="Longleftrightarrow",
unicodeslot=0x27FA,
@@ -63485,7 +63473,6 @@ characters.data={
description="LONG LEFTWARDS ARROW FROM BAR",
direction="on",
linebreak="al",
- fallback=[[\longleftarrow\mapstochar]], -- untested
mathclass="relation",
mathname="longmapsfrom",
unicodeslot=0x27FB,
@@ -63495,7 +63482,6 @@ characters.data={
description="LONG RIGHTWARDS ARROW FROM BAR",
direction="on",
linebreak="al",
- fallback=[[\mapstochar\longrightarrow]],
mathclass="relation",
mathname="longmapsto",
unicodeslot=0x27FC,
diff --git a/tex/context/base/chem-ini.lua b/tex/context/base/chem-ini.lua
index 908749092..d5c189fff 100644
--- a/tex/context/base/chem-ini.lua
+++ b/tex/context/base/chem-ini.lua
@@ -11,6 +11,8 @@ local lpegmatch = lpeg.match
local trace_molecules = false trackers.register("chemistry.molecules", function(v) trace_molecules = v end)
+local report_chemistry = logs.new("chemistry")
+
local ctxcatcodes = tex.ctxcatcodes
chemicals = chemicals or { }
@@ -67,7 +69,7 @@ end
function commands.molecule(str)
if trace_molecules then
local rep = lpegmatch(parser,str)
- logs.report("chemistry", "molecule %s => %s",str,rep)
+ report_chemistry("molecule %s => %s",str,rep)
texsprint(ctxcatcodes,rep)
else
texsprint(ctxcatcodes,lpegmatch(parser,str))
diff --git a/tex/context/base/chem-str.lua b/tex/context/base/chem-str.lua
index ad4cc6c1b..0a963d781 100644
--- a/tex/context/base/chem-str.lua
+++ b/tex/context/base/chem-str.lua
@@ -13,6 +13,8 @@ if not modules then modules = { } end modules ['chem-str'] = {
local trace_structure = false trackers.register("chemistry.structure", function(v) trace_structure = v end)
local trace_textstack = false trackers.register("chemistry.textstack", function(v) trace_textstack = v end)
+local report_chemistry = logs.new("chemistry")
+
local format, gmatch, match, lower, gsub = string.format, string.gmatch, string.match, string.lower, string.gsub
local concat, insert, remove = table.concat, table.insert, table.remove
local apply = structure.processors.apply
@@ -170,7 +172,7 @@ local function fetch(txt)
end
if t then
if trace_textstack then
- logs.report("chemical", "fetching from stack %s slot %s: %s",txt,st.n,t)
+ report_chemistry("fetching from stack %s slot %s: %s",txt,st.n,t)
end
st.n = st.n + 1
end
@@ -441,7 +443,7 @@ function chemicals.stop()
metacode[#metacode+1] = "chem_stop_structure ;"
local mpcode = concat(metacode,"\n")
if trace_structure then
- logs.report("chemical", "metapost code:\n%s", mpcode)
+ report_chemistry("metapost code:\n%s", mpcode)
end
metapost.graphic(chemicals.instance,chemicals.format,mpcode)
metacode = nil
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 6269e5a61..08307b0d5 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -17,9 +17,9 @@
\unprotect
-\ctxlua{logs.report = commands.report} % this will become default
+% \ctxlua{logs.report = commands.writereport} % this will become default
-\def\immediatemessage#1{\ctxlua{commands.writestatus("message","#1")}}
+\def\immediatemessage#1{\ctxlua{logs.status("message","#1")}}
% we need to figure this out (to be discussed)
diff --git a/tex/context/base/cont-new.tex b/tex/context/base/cont-new.tex
index 9c4fdba18..ac05755bb 100644
--- a/tex/context/base/cont-new.tex
+++ b/tex/context/base/cont-new.tex
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2010.05.24 13:05}
+\newcontextversion{2010.06.23 12:45}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
@@ -95,7 +95,7 @@
\prependtoks \restoreendofline \to \everybeforeshipout
-\let\cs\getvalue
+% \let\cs\getvalue % no, we want \cs to be czech
% experimental so this may change
diff --git a/tex/context/base/context.lus b/tex/context/base/context.lus
new file mode 100644
index 000000000..960e96adf
--- /dev/null
+++ b/tex/context/base/context.lus
@@ -0,0 +1,71 @@
+if not modules then modules = { } end modules ['context'] = {
+ version = 1.001,
+ comment = "companion to context.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[<p>This table specifies what stub files are needed in order to create
+the format. These files are loaded before the format is made so that we
+bypass kpse. When the format itself is used, another stub is used (with
+suffix lui). The current format builder is to a large part determined by
+the way luatex evolved and the process will probably change.</p>]]--
+
+local method = 3
+
+local stubfiles = {
+
+ 'luat-cod.lua',
+
+ -- Here follows a list of trac, luat and data files, but we no longer
+ -- do it this way, so there is no need to keep this list updated.
+
+}
+
+-- This method will trigger the creation of a stub file with all necessary
+-- libraries merged. This is how we originally did it.
+
+if method == 1 then
+
+ return stubfiles
+
+end
+
+-- This method will use this file as stub file so no merge is needed.
+
+if method == 2 then
+
+ if resolvers then
+ -- we're loading this file in mtxrun
+ else
+
+ local sourcepath = string.gsub(arg and arg[1] or "","/[^/]+$","")
+ local targetpath = "."
+
+ if sourcepath == "" then sourcepath = targetpath end
+
+ for i=1,#stubfiles do
+ local filename = sourcepath .. "/" .. stubfiles[i]
+ texio.write_nl("preloading " .. filename)
+ dofile(filename)
+ end
+ texio.write_nl("\n")
+
+ end
+
+ return "context.lus"
+
+end
+
+-- Only a simple stub:
+
+if method == 3 then
+
+ return "luat-cod.lua"
+
+end
+
+-- The last resort.
+
+return stubfiles
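A format builder that loads this file therefore has to cope with both possible return values: a table of stub files or a single stub filename. The loader below is purely illustrative (the real bootstrap lives in the luatex/mtxrun startup code); it only shows the branching on the returned value:

local spec = dofile("context.lus") -- a table (method 1 and the fallback) or a string (methods 2 and 3)

if type(spec) == "table" then
    for i=1,#spec do
        dofile(spec[i])            -- preload every listed stub
    end
else
    dofile(spec)                   -- a single stub file does all the bootstrapping
end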
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 33fa3a901..32bcfbe24 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -11,26 +11,29 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% syst-cat -> catc-ini + vectors
-% spec-* -> special backends for luatex
-
%D First we load the system modules. These implement a lot of
-%D manipulation macros. The first one loads \PLAIN\ \TEX, as
-%D minimal as possible.
+%D manipulation macros. We start with setting up some basic \TEX\
+%D machinery.
\loadcorefile{syst-ini}
+%D We just quit if new functionality is expected.
+
\ifnum\luatexversion<60 % also change message
\writestatus{!!!!}{Your luatex binary is too old, you need at least version 0.60!}
\expandafter\end
\fi
-\newtoks\contextversiontoks \contextversiontoks\expandafter{\contextversion} % at the lua end
+%D Currently this is the only way to pass the version info
+%D to \LUA.
+
+\newtoks\contextversiontoks \contextversiontoks\expandafter{\contextversion}
+
+%D Now the more fundamental code gets defined.
\loadcorefile{norm-ctx}
\loadcorefile{syst-pln}
-
-\newif\ifCONTEXT \CONTEXTtrue % will disappear
+\loadmarkfile{syst-mes}
\loadmarkfile{luat-cod}
\loadmarkfile{luat-bas}
@@ -72,6 +75,8 @@
\loadmarkfile{toks-ini}
+\loadmarkfile{attr-ini}
+
\loadmarkfile{node-ini}
\loadmarkfile{node-fin}
\loadmarkfile{node-mig}
@@ -85,12 +90,11 @@
\loadmarkfile{lpdf-pdx} % might be merged into lpdf-ini
\loadmarkfile{back-pdf} % some day back-ini will load this
-\loadmarkfile{attr-ini}
+\loadmarkfile{attr-div} % code will move
\loadmarkfile{core-env}
\loadmarkfile{trac-tex}
-\loadmarkfile{trac-lmx}
\loadmarkfile{trac-deb}
\loadmarkfile{blob-ini} % not to be used, we only use a helper
@@ -314,6 +318,7 @@
\loadmarkfile{core-fnt}
\loadmarkfile{node-rul}
+\loadmarkfile{node-spl}
\loadmarkfile{strc-not}
\loadmarkfile{strc-lnt}
@@ -358,19 +363,14 @@
\setupcurrentlanguage[\s!en]
\prependtoks
- \ctxlua{statistics.starttiming(ctx)}%
+ \ctxlua{statistics.starttiming(statistics)}%
\to \everyjob
\appendtoks
- \ctxlua{statistics.stoptiming(ctx)}%
+ \ctxlua{statistics.stoptiming(statistics)}%
\to \everyjob
\appendtoks
- \writestatus\m!lua{used config path - \ctxlua{tex.print(caches.configpath())}}%
- \writestatus\m!lua{used cache path - \ctxlua{tex.print(caches.path)}}%
-\to \everydump
-
-\appendtoks
\ctxlua {
statistics.report_storage("log")
statistics.save_fmt_status("\jobname","\contextversion","context.tex")
diff --git a/tex/context/base/context.tex b/tex/context/base/context.tex
index 47489658e..35ebd2267 100644
--- a/tex/context/base/context.tex
+++ b/tex/context/base/context.tex
@@ -20,7 +20,7 @@
%D your styles and modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2010.05.24 13:05}
+\edef\contextversion{2010.06.23 12:45}
%D For those who want to use this:
diff --git a/tex/context/base/core-con.lua b/tex/context/base/core-con.lua
index dca1c7d10..f8f54e9e7 100644
--- a/tex/context/base/core-con.lua
+++ b/tex/context/base/core-con.lua
@@ -418,7 +418,7 @@ local vector = {
--~ return concat(result)
--~ end
-function tochinese(n,name) -- normal, caps, all
+local function tochinese(n,name) -- normal, caps, all
-- improved version by Li Yanrui
local result = { }
local vector = vector[name] or vector.normal
diff --git a/tex/context/base/core-env.mkiv b/tex/context/base/core-env.mkiv
index d927ff3ad..b60a01454 100644
--- a/tex/context/base/core-env.mkiv
+++ b/tex/context/base/core-env.mkiv
@@ -171,7 +171,7 @@
\unexpanded\def\startsetups{} % to please dep checker
\unexpanded\def\stopsetups {} % to please dep checker
-\expanded
+\expanded % will become obsolete
{\long\def\@EA\noexpand\csname\e!start\v!setups\endcsname
{\begingroup\noexpand\doifnextoptionalelse
{\noexpand\dostartsetupsA\@EA\noexpand\csname\e!stop\v!setups\endcsname}
@@ -215,38 +215,150 @@
\let\directsetup\dosetups
\def\doifsetupselse#1% to be done: grid
- {\doifdefinedelse{\??su:#1}}
+ {\doifdefinedelse{\??su:#1}} % todo: ifcsname
-\chardef\setupseolmode\plusone
+% % % %
-\unexpanded\def\startsetups {\xxstartsetups\plusone \stopsetups } \let\stopsetups \relax
-\unexpanded\def\startlocalsetups{\xxstartsetups\plusone \stoplocalsetups} \let\stoplocalsetups\relax
-\unexpanded\def\startrawsetups {\xxstartsetups\zerocount\stoprawsetups } \let\stoprawsetups \relax
-\unexpanded\def\startxmlsetups {\xxstartsetups\plustwo \stopxmlsetups } \let\stopxmlsetups \relax
-
-\def\xxstartsetups#1#2%
- {\begingroup\let\setupseolmode#1\doifnextoptionalelse{\dostartsetupsA#2}{\dostartsetupsB#2}}
-
-\def\dostartsetupsA#1% [ ] delimited
- {\ifcase\setupseolmode\or\catcode`\^^M\@@ignore\or\catcode`\^^M\@@ignore\catcode`\|\@@other\fi
- \dotripleempty\dostartsetups[#1]}
-
-\def\dostartsetupsB#1#2 % space delimited
- {\ifcase\setupseolmode\or\catcode`\^^M\@@ignore\or\catcode`\^^M\@@ignore\catcode`\|\@@other\fi
- \dodostartsetups#1\empty{#2}}
-
-\def\dostartsetupsC[#1][#2][#3]{\dodostartsetups#1{#2}{#3}} % [..] [..]
-\def\dostartsetupsD[#1][#2][#3]{\dodostartsetups#1\empty{#2}} % [..]
-
-\def\dostartsetups
- {\ifthirdargument\@EA\dostartsetupsC\else\@EA\dostartsetupsD\fi}
-
-\long\def\dodostartsetups#1#2#3%
- {\long\def\dododostartsetups##1#1%
- {\endgroup
- \dodoglobal % bah
- \long\expandafter\def\csname\??su#2:#3\expandafter\endcsname\expandafter####\expandafter1\expandafter{##1}}%
- \dododostartsetups\empty} % the empty trick prevents the { } in {arg} from being eaten up
+% \chardef\setupseolmode\plusone
+%
+% \unexpanded\def\startsetups {\xxstartsetups\plusone \stopsetups } \let\stopsetups \relax
+% \unexpanded\def\startlocalsetups{\xxstartsetups\plusone \stoplocalsetups} \let\stoplocalsetups\relax
+% \unexpanded\def\startrawsetups {\xxstartsetups\zerocount\stoprawsetups } \let\stoprawsetups \relax
+% \unexpanded\def\startxmlsetups {\xxstartsetups\plustwo \stopxmlsetups } \let\stopxmlsetups \relax
+%
+% \def\xxstartsetups#1#2%
+% {\begingroup\let\setupseolmode#1\doifnextoptionalelse{\dostartsetupsA#2}{\dostartsetupsB#2}}
+%
+% \def\dostartsetupsA#1% [ ] delimited
+% {\ifcase\setupseolmode\or\catcode`\^^M\@@ignore\or\catcode`\^^M\@@ignore\catcode`\|\@@other\fi
+% \dotripleempty\dostartsetups[#1]}
+%
+% \def\dostartsetupsB#1#2 % space delimited
+% {\ifcase\setupseolmode\or\catcode`\^^M\@@ignore\or\catcode`\^^M\@@ignore\catcode`\|\@@other\fi
+% \dodostartsetups#1\empty{#2}}
+%
+% \def\dostartsetupsC[#1][#2][#3]{\dodostartsetups#1{#2}{#3}} % [..] [..]
+% \def\dostartsetupsD[#1][#2][#3]{\dodostartsetups#1\empty{#2}} % [..]
+%
+% \def\dostartsetups
+% {\ifthirdargument\@EA\dostartsetupsC\else\@EA\dostartsetupsD\fi}
+%
+% \long\def\dodostartsetups#1#2#3% needs a speedup
+% {\long\def\dododostartsetups##1#1%
+% {\endgroup
+% \dodoglobal % bah
+% \long\expandafter\def\csname\??su#2:#3\expandafter\endcsname\expandafter####\expandafter1\expandafter{##1}}%
+% \dododostartsetups\empty} % the empty trick prevents the { } in {arg} from being eaten up
+
+% % % %
+
+% \startluasetups oeps
+% tex.print("DONE")
+% a = 1
+% b = 1
+% \stopluasetups
+%
+% \luasetup{oeps}
+%
+% \startsetups xxx
+% ziezo
+% \stopsetups
+%
+% \directsetup{xxx}
+%
+% \startxmlsetups zzz
+% [[#1]]
+% \stopxmlsetups
+%
+% \xmlsetup{123}{zzz}
+%
+% \startbuffer[what]
+% tex.print("DONE")
+% \stopbuffer
+%
+% \startbuffer
+% tex.print("MORE")
+% \stopbuffer
+%
+% \ctxluabuffer[what]
+%
+% \ctxluabuffer
+
+\newtoks\everydefinesetups \appendtoks
+ \catcode`\^^M\@@ignore
+\to \everydefinesetups
+
+\newtoks\everydefinelocalsetups \appendtoks
+ \catcode`\^^M\@@ignore
+\to \everydefinelocalsetups
+
+\newtoks\everydefinerawsetups \appendtoks
+ % nothing
+\to \everydefinerawsetups
+
+\newtoks\everydefinexmlsetups \appendtoks
+ \catcode`\^^M\@@ignore
+ \catcode`\|\@@other
+\to \everydefinexmlsetups
+
+\newtoks\everydefineluasetups \appendtoks
+ \obeylualines
+ \obeyluatokens
+\to \everydefineluasetups
+
+\unexpanded\def\startluasetups {\begingroup\doifnextoptionalelse\dostartluasetupsA \dostartluasetupsB }
+\unexpanded\def\startxmlsetups {\begingroup\doifnextoptionalelse\dostartxmlsetupsA \dostartxmlsetupsB }
+\unexpanded\def\startrawsetups {\begingroup\doifnextoptionalelse\dostartrawsetupsA \dostartrawsetupsB }
+\unexpanded\def\startlocalsetups{\begingroup\doifnextoptionalelse\dostartlocalsetupsA\dostartlocalsetupsB}
+\unexpanded\def\startsetups {\begingroup\doifnextoptionalelse\dostartsetupsA \dostartsetupsB }
+
+\let\stopluasetups \relax
+\let\stopxmlsetups \relax
+\let\stoprawsetups \relax
+\let\stoplocalsetups \relax
+\let\stopsetups \relax
+
+\def\dodostartluasetups #1#2#3\stopluasetups {\endgroup\dodoglobal\long\@EA\def\csname\??su#1:#2\@EA\endcsname\@EA##\@EA1\@EA{#3}}
+\def\dodostartxmlsetups #1#2#3\stopxmlsetups {\endgroup\dodoglobal\long\@EA\def\csname\??su#1:#2\@EA\endcsname\@EA##\@EA1\@EA{#3}}
+\def\dodostartrawsetups #1#2#3\stoprawsetups {\endgroup\dodoglobal\long\@EA\def\csname\??su#1:#2\@EA\endcsname\@EA##\@EA1\@EA{#3}}
+\def\dodostartlocalsetups #1#2#3\stoplocalsetups{\endgroup\dodoglobal\long\@EA\def\csname\??su#1:#2\@EA\endcsname\@EA##\@EA1\@EA{#3}}
+\def\dodostartsetups #1#2#3\stopsetups {\endgroup\dodoglobal\long\@EA\def\csname\??su#1:#2\@EA\endcsname\@EA##\@EA1\@EA{#3}}
+
+\def\dostartluasetups {\ifsecondargument\@EA\dostartluasetupsC \else\@EA\dostartluasetupsD \fi}
+\def\dostartxmlsetups {\ifsecondargument\@EA\dostartxmlsetupsC \else\@EA\dostartxmlsetupsD \fi}
+\def\dostartrawsetups {\ifsecondargument\@EA\dostartrawsetupsC \else\@EA\dostartrawsetupsD \fi}
+\def\dostartlocalsetups {\ifsecondargument\@EA\dostartlocalsetupsC\else\@EA\dostartlocalsetupsD\fi}
+\def\dostartsetups {\ifsecondargument\@EA\dostartsetupsC \else\@EA\dostartsetupsD \fi}
+
+\def\dostartluasetupsA {\the\everydefineluasetups \dodoubleempty\dostartluasetups} % [ ] delimited
+\def\dostartxmlsetupsA {\the\everydefinexmlsetups \dodoubleempty\dostartxmlsetups} % [ ] delimited
+\def\dostartrawsetupsA {\the\everydefinerawsetups \dodoubleempty\dostartrawsetups} % [ ] delimited
+\def\dostartlocalsetupsA {\the\everydefinelocalsetups\dodoubleempty\dostartlocalsetups} % [ ] delimited
+\def\dostartsetupsA {\the\everydefinesetups \dodoubleempty\dostartsetups} % [ ] delimited
+
+ % empty preserves inner {} (is removed by the \@EA{#3})
+
+\def\dostartluasetupsB #1 {\the\everydefineluasetups \dodostartluasetups \empty{#1}\empty} % space delimited
+\def\dostartxmlsetupsB #1 {\the\everydefinexmlsetups \dodostartxmlsetups \empty{#1}\empty} % space delimited
+\def\dostartrawsetupsB #1 {\the\everydefinerawsetups \dodostartrawsetups \empty{#1}\empty} % space delimited
+\def\dostartlocalsetupsB #1 {\the\everydefinelocalsetups\dodostartlocalsetups\empty{#1}\empty} % space delimited
+\def\dostartsetupsB #1 {\the\everydefinesetups \dodostartsetups \empty{#1}\empty} % space delimited
+
+\def\dostartluasetupsC [#1][#2]{\the\everydefineluasetups \dodostartluasetups {#1}{#2}\empty} % [..] [..]
+\def\dostartxmlsetupsC [#1][#2]{\the\everydefinexmlsetups \dodostartxmlsetups {#1}{#2}\empty} % [..] [..]
+\def\dostartrawsetupsC [#1][#2]{\the\everydefinerawsetups \dodostartrawsetups {#1}{#2}\empty} % [..] [..]
+\def\dostartlocalsetupsC[#1][#2]{\the\everydefinelocalsetups\dodostartlocalsetups{#1}{#2}\empty} % [..] [..]
+\def\dostartsetupsC [#1][#2]{\the\everydefinesetups \dodostartsetups {#1}{#2}\empty} % [..] [..]
+
+\def\dostartluasetupsD [#1][#2]{\the\everydefineluasetups \dodostartluasetups \empty{#1}\empty} % [..]
+\def\dostartxmlsetupsD [#1][#2]{\the\everydefinexmlsetups \dodostartxmlsetups \empty{#1}\empty} % [..]
+\def\dostartrawsetupsD [#1][#2]{\the\everydefinerawsetups \dodostartrawsetups \empty{#1}\empty} % [..]
+\def\dostartlocalsetupsD[#1][#2]{\the\everydefinelocalsetups\dodostartlocalsetups\empty{#1}\empty} % [..]
+\def\dostartsetupsD [#1][#2]{\the\everydefinesetups \dodostartsetups \empty{#1}\empty} % [..]
+
+\def\luasetup#1{\ctxlua{\dosetups{#1}}}
+
+% % % %
\def\systemsetupsprefix{*}
diff --git a/tex/context/base/core-uti.lua b/tex/context/base/core-uti.lua
index 01fd8522b..68efdcb0c 100644
--- a/tex/context/base/core-uti.lua
+++ b/tex/context/base/core-uti.lua
@@ -21,6 +21,8 @@ local sort, concat, format, match = table.sort, table.concat, string.format, str
local next, type, tostring = next, type, tostring
local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
+local report_jobcontrol = logs.new("jobcontrol")
+
if not jobs then jobs = { } end
if not job then jobs['main'] = { } end job = jobs['main']
@@ -42,7 +44,7 @@ job.comment(format("version: %1.2f",jobs.version))
function job.initialize(loadname,savename)
job.load(loadname)
- main.register_stop_actions(function()
+ luatex.register_stop_actions(function()
if not status.lasterrorstring or status.lasterrorstring == "" then
job.save(savename)
end
@@ -239,7 +241,7 @@ function job.load(filename)
if data and data ~= "" then
local version = tonumber(match(data,"^-- version: ([%d%.]+)"))
if version ~= jobs.version then
- logs.report("job","version mismatch with jobfile: %s <> %s", version or "?", jobs.version)
+ report_jobcontrol("version mismatch with jobfile: %s <> %s", version or "?", jobs.version)
else
local data = loadstring(data)
if data then
@@ -262,7 +264,7 @@ end
-- eventually this will end up in strc-ini
statistics.register("startup time", function()
- return statistics.elapsedseconds(ctx,"including runtime option file processing")
+ return statistics.elapsedseconds(statistics,"including runtime option file processing")
end)
statistics.register("jobdata time",function()
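The timing calls above are keyed on an arbitrary object; this commit switches the key from the old ctx token to the statistics table itself. A minimal sketch of such a keyed timer, assuming nothing about the real statistics module beyond starttiming/stoptiming/elapsedseconds taking an instance as first argument:

local timers = { }

local function starttiming(instance)
    timers[instance] = os.clock()
end

local function stoptiming(instance)
    local start = timers[instance]
    if start then
        timers[instance] = os.clock() - start   -- store the elapsed time
    end
end

local function elapsedseconds(instance,rest)
    local t = timers[instance]
    if t then
        return string.format("%0.3f seconds %s",t,rest or "")
    end
end

local job = { }            -- any table can serve as the key
starttiming(job)
-- ... work ...
stoptiming(job)
print(elapsedseconds(job,"including runtime option file processing"))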
diff --git a/tex/context/base/core-uti.mkiv b/tex/context/base/core-uti.mkiv
index 6b2dae2c9..352093ff5 100644
--- a/tex/context/base/core-uti.mkiv
+++ b/tex/context/base/core-uti.mkiv
@@ -21,18 +21,6 @@
{\ctxlua{jobvariables.save("\strippedcsname#1","#2")}}
\appendtoks
- \ctxlua{storage.dump()}% will move to lua
-\to \everydump
-
-\appendtoks
- \ctxlua{storage.finalize()}% will move to lua
-\to \everyfinalizeluacode
-
-\appendtoks
- \ctxlua{nodes.cleanup_reserved()}% will move to lua
-\to \everydump
-
-\appendtoks
\ctxlua {
job.comment("file: \jobname")
job.comment("format: \contextformat")
@@ -44,23 +32,9 @@
\def\notuccompression{\ctxlua{job.pack=false}}
-% cleaner, for the moment
-
-% \appendtoks
-% \ctxlua {
-% os.remove("\jobname.tui")
-% os.remove("\jobname.tuo")
-% }%
-% \to \everystarttext
-
%D Some styles might use these commands:
-\newif \ifutilitydone
-\let \checkutilities \relax
-\let \currentutilityfilename \jobname
-\def \installprogram {\dosingleempty\doinstallprogram}
-\def \doinstallprogram [#1]{\gobbleoneargument}
-\def \installedprogram [#1]{}
-\let \installplugin \gobblethreearguments
+\def\installprogram {\dosingleempty\doinstallprogram}
+\def\doinstallprogram[#1]{\gobbleoneargument}
\protect \endinput
diff --git a/tex/context/base/data-aux.lua b/tex/context/base/data-aux.lua
index 26e1f551c..06322a848 100644
--- a/tex/context/base/data-aux.lua
+++ b/tex/context/base/data-aux.lua
@@ -7,49 +7,52 @@ if not modules then modules = { } end modules ['data-aux'] = {
}
local find = string.find
+local type, next = type, next
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix
local scriptpath = "scripts/context/lua"
newname = file.addsuffix(newname,"lua")
local oldscript = resolvers.clean_path(oldname)
if trace_locating then
- logs.report("fileio","to be replaced old script %s", oldscript)
+ report_resolvers("to be replaced old script %s", oldscript)
end
local newscripts = resolvers.find_files(newname) or { }
if #newscripts == 0 then
if trace_locating then
- logs.report("fileio","unable to locate new script")
+ report_resolvers("unable to locate new script")
end
else
for i=1,#newscripts do
local newscript = resolvers.clean_path(newscripts[i])
if trace_locating then
- logs.report("fileio","checking new script %s", newscript)
+ report_resolvers("checking new script %s", newscript)
end
if oldscript == newscript then
if trace_locating then
- logs.report("fileio","old and new script are the same")
+ report_resolvers("old and new script are the same")
end
elseif not find(newscript,scriptpath) then
if trace_locating then
- logs.report("fileio","new script should come from %s",scriptpath)
+ report_resolvers("new script should come from %s",scriptpath)
end
elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
if trace_locating then
- logs.report("fileio","invalid new script name")
+ report_resolvers("invalid new script name")
end
else
local newdata = io.loaddata(newscript)
if newdata then
if trace_locating then
- logs.report("fileio","old script content replaced by new content")
+ report_resolvers("old script content replaced by new content")
end
io.savedata(oldscript,newdata)
break
elseif trace_locating then
- logs.report("fileio","unable to load new script")
+ report_resolvers("unable to load new script")
end
end
end
diff --git a/tex/context/base/data-con.lua b/tex/context/base/data-con.lua
index fabe0baa1..0c16571cb 100644
--- a/tex/context/base/data-con.lua
+++ b/tex/context/base/data-con.lua
@@ -1,5 +1,5 @@
if not modules then modules = { } end modules ['data-con'] = {
- version = 1.001,
+ version = 1.100,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
@@ -29,46 +29,58 @@ containers = containers or { }
containers.usecache = true
+local report_cache = logs.new("cache")
+
local function report(container,tag,name)
if trace_cache or trace_containers then
- logs.report(format("%s cache",container.subcategory),"%s: %s",tag,name or 'invalid')
+ report_cache("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
end
end
local allocated = { }
--- tracing
+local mt = {
+ __index = function(t,k)
+ if k == "writable" then
+ local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable = writable
+ return writable
+ elseif k == "readables" then
+ local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables = readables
+ return readables
+ end
+ end
+}
function containers.define(category, subcategory, version, enabled)
- return function()
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or 1.000,
- trace = false,
- path = caches and caches.setpath and caches.setpath(category,subcategory),
- }
- c[subcategory] = s
- end
- return s
- else
- return nil
+ if category and subcategory then
+ local c = allocated[category]
+ if not c then
+ c = { }
+ allocated[category] = c
+ end
+ local s = c[subcategory]
+ if not s then
+ s = {
+ category = category,
+ subcategory = subcategory,
+ storage = { },
+ enabled = enabled,
+ version = version or math.pi, -- after all, this is TeX
+ trace = false,
+ -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
+ -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
+ }
+ setmetatable(s,mt)
+ c[subcategory] = s
end
+ return s
end
end
function containers.is_usable(container, name)
- return container.enabled and caches and caches.iswritable(container.path, name)
+ return container.enabled and caches and caches.iswritable(container.writable, name)
end
function containers.is_valid(container, name)
@@ -81,18 +93,20 @@ function containers.is_valid(container, name)
end
function containers.read(container,name)
- if container.enabled and caches and not container.storage[name] and containers.usecache then
- container.storage[name] = caches.loaddata(container.path,name)
- if containers.is_valid(container,name) then
+ local storage = container.storage
+ local stored = storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored = caches.loaddata(container.readables,name)
+ if stored and stored.cache_version == container.version then
report(container,"loaded",name)
else
- container.storage[name] = nil
+ stored = nil
end
- end
- if container.storage[name] then
+ storage[name] = stored
+ elseif stored then
report(container,"reusing",name)
end
- return container.storage[name]
+ return stored
end
function containers.write(container, name, data)
@@ -101,7 +115,7 @@ function containers.write(container, name, data)
if container.enabled and caches then
local unique, shared = data.unique, data.shared
data.unique, data.shared = nil, nil
- caches.savedata(container.path, name, data)
+ caches.savedata(container.writable, name, data)
report(container,"saved",name)
data.unique, data.shared = unique, shared
end
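Note how the rewritten containers.define no longer computes cache paths eagerly: the __index metamethod fills in writable and readables the first time they are asked for and stores them in the table, so later accesses no longer hit the metamethod. A generic sketch of that lazy-field idiom, independent of the caches API:

local container = setmetatable({ category = "fonts" }, {
    __index = function(t,k)
        if k == "writable" then
            local v = { "/some/cache/path" }   -- stands in for caches.getwritablepath(...)
            t.writable = v                     -- cache the result in the table itself
            return v
        end
    end
})

print(container.writable[1]) -- first access: computed via __index
print(container.writable[1]) -- second access: plain field, metamethod not called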
diff --git a/tex/context/base/data-crl.lua b/tex/context/base/data-crl.lua
index 55b1a8fad..b83e59bdf 100644
--- a/tex/context/base/data-crl.lua
+++ b/tex/context/base/data-crl.lua
@@ -6,32 +6,31 @@ if not modules then modules = { } end modules ['data-crl'] = {
license = "see context related readme files"
}
-local gsub = string.gsub
+-- this one is replaced by data-sch.lua --
curl = curl or { }
-curl.cached = { }
-curl.cachepath = caches.definepath("curl")
-
+local gsub = string.gsub
local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-function curl.fetch(protocol, name)
- local cachename = curl.cachepath() .. "/" .. gsub(name,"[^%a%d%.]+","-")
--- cachename = gsub(cachename,"[\\/]", io.fileseparator)
- cachename = gsub(cachename,"[\\]", "/") -- cleanup
- if not curl.cached[name] then
+local cached = { }
+
+function curl.fetch(protocol, name) -- todo: use socket library
+ local cleanname = gsub(name,"[^%a%d%.]+","-")
+ local cachename = caches.setfirstwritablefile(cleanname,"curl")
+ if not cached[name] then
if not io.exists(cachename) then
- curl.cached[name] = cachename
+ cached[name] = cachename
local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name -- no protocol .. "://"
os.spawn(command)
end
if io.exists(cachename) then
- curl.cached[name] = cachename
+ cached[name] = cachename
else
- curl.cached[name] = ""
+ cached[name] = ""
end
end
- return curl.cached[name]
+ return cached[name]
end
function finders.curl(protocol,filename)
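The rewritten curl.fetch combines a per-run memo table with an on-disk cache: the remote file is only fetched when it is not on disk yet, and the resolved cache name is remembered for the rest of the run. A self-contained sketch of that shape (the cache directory and test URL are made-up examples; the real code gets its cache name from caches.setfirstwritablefile):

local cached = { }

local function exists(name)
    local f = io.open(name)
    if f then f:close() return true end
    return false
end

local function fetch(name)
    local cachename = "cache/" .. name:gsub("[^%a%d%.]+","-") -- same cleanup as above
    if not cached[name] then
        if not exists(cachename) then
            os.execute("curl --silent --create-dirs --output " .. cachename .. " " .. name)
        end
        cached[name] = exists(cachename) and cachename or ""
    end
    return cached[name]
end

print(fetch("http://www.pragma-ade.com/index.html"))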
diff --git a/tex/context/base/data-ctx.lua b/tex/context/base/data-ctx.lua
index 89eb2742d..47f9c527e 100644
--- a/tex/context/base/data-ctx.lua
+++ b/tex/context/base/data-ctx.lua
@@ -8,24 +8,34 @@ if not modules then modules = { } end modules ['data-ctx'] = {
local format = string.format
-function resolvers.save_used_files_in_trees(filename,jobname)
- if not filename then filename = 'luatex.jlg' end
- local found = instance.foundintrees
+local report_resolvers = logs.new("resolvers")
+
+function resolvers.save_used_files_in_trees()
+ local jobname = environment.jobname
+ if not jobname or jobname == "" then jobname = "luatex" end
+ local filename = file.replacesuffix(jobname,'jlg')
local f = io.open(filename,'w')
if f then
f:write("<?xml version='1.0' standalone='yes'?>\n")
f:write("<rl:job>\n")
- if jobname then
- f:write(format("\t<rl:name>%s</rl:name>\n",jobname))
- end
- f:write("\t<rl:files>\n")
+ f:write(format("\t<rl:jobname>%s</rl:jobname>\n",jobname))
+ f:write(format("\t<rl:contextversion>%s</rl:contextversion>\n",environment.version))
+ local found = resolvers.instance.foundintrees
local sorted = table.sortedkeys(found)
- for k=1,#sorted do
- local v = sorted[k]
- f:write(format("\t\t<rl:file n='%s'>%s</rl:file>\n",found[v],v))
+ if #sorted > 0 then
+ f:write("\t<rl:files>\n")
+ for k=1,#sorted do
+ local v = sorted[k]
+ f:write(format("\t\t<rl:file n='%s'>%s</rl:file>\n",found[v],v))
+ end
+ f:write("\t</rl:files>\n")
+ else
+ f:write("\t<rl:files/>\n")
end
- f:write("\t</rl:files>\n")
- f:write("</rl:usedfiles>\n")
+ f:write("</rl:job>\n")
f:close()
+ report_resolvers("saving used tree files in '%s'",filename)
end
end
+
+directives.register("system.dumpfiles", function() resolvers.save_used_files_in_trees() end)
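For a (hypothetical) run of myfile.tex the rewritten function would produce a myfile.jlg file along these lines; the element names follow the f:write calls above, while the version string and the file entry are just examples:

<?xml version='1.0' standalone='yes'?>
<rl:job>
  <rl:jobname>myfile</rl:jobname>
  <rl:contextversion>2010.06.23 12:45</rl:contextversion>
  <rl:files>
    <rl:file n='1'>texmf/tex/context/base/context.mkiv</rl:file>
  </rl:files>
</rl:job>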
diff --git a/tex/context/base/data-env.lua b/tex/context/base/data-env.lua
new file mode 100644
index 000000000..f99cb47f5
--- /dev/null
+++ b/tex/context/base/data-env.lua
@@ -0,0 +1,161 @@
+if not modules then modules = { } end modules ['data-env'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local formats = { } resolvers.formats = formats
+local suffixes = { } resolvers.suffixes = suffixes
+local dangerous = { } resolvers.dangerous = dangerous
+local suffixmap = { } resolvers.suffixmap = suffixmap
+local alternatives = { } resolvers.alternatives = alternatives
+
+formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' }
+formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' }
+formats['fmt'] = 'TEXFORMATS' suffixes['fmt'] = { 'fmt' }
+formats['map'] = 'TEXFONTMAPS' suffixes['map'] = { 'map' }
+formats['mp'] = 'MPINPUTS' suffixes['mp'] = { 'mp' }
+formats['ocp'] = 'OCPINPUTS' suffixes['ocp'] = { 'ocp' }
+formats['ofm'] = 'OFMFONTS' suffixes['ofm'] = { 'ofm', 'tfm' }
+formats['otf'] = 'OPENTYPEFONTS' suffixes['otf'] = { 'otf' }
+formats['opl'] = 'OPLFONTS' suffixes['opl'] = { 'opl' }
+formats['otp'] = 'OTPINPUTS' suffixes['otp'] = { 'otp' }
+formats['ovf'] = 'OVFFONTS' suffixes['ovf'] = { 'ovf', 'vf' }
+formats['ovp'] = 'OVPFONTS' suffixes['ovp'] = { 'ovp' }
+formats['tex'] = 'TEXINPUTS' suffixes['tex'] = { 'tex' }
+formats['tfm'] = 'TFMFONTS' suffixes['tfm'] = { 'tfm' }
+formats['ttf'] = 'TTFONTS' suffixes['ttf'] = { 'ttf', 'ttc', 'dfont' }
+formats['pfb'] = 'T1FONTS' suffixes['pfb'] = { 'pfb', 'pfa' }
+formats['vf'] = 'VFFONTS' suffixes['vf'] = { 'vf' }
+formats['fea'] = 'FONTFEATURES' suffixes['fea'] = { 'fea' }
+formats['cid'] = 'FONTCIDMAPS' suffixes['cid'] = { 'cid', 'cidmap' }
+formats['texmfscripts'] = 'TEXMFSCRIPTS' suffixes['texmfscripts'] = { 'rb', 'pl', 'py' }
+formats['lua'] = 'LUAINPUTS' suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' }
+formats['lib'] = 'CLUAINPUTS' suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' }
+
+-- backward compatible ones
+
+alternatives['map files'] = 'map'
+alternatives['enc files'] = 'enc'
+alternatives['cid maps'] = 'cid' -- great, why no cid files
+alternatives['font feature files'] = 'fea' -- and fea files here
+alternatives['opentype fonts'] = 'otf'
+alternatives['truetype fonts'] = 'ttf'
+alternatives['truetype collections'] = 'ttc'
+alternatives['truetype dictionary'] = 'dfont'
+alternatives['type1 fonts'] = 'pfb'
+
+--[[ldx--
+<p>If you wondered about some of the previous mappings, how about
+the next bunch:</p>
+--ldx]]--
+
+-- kpse specific ones (a few omitted) .. we only add them for locating
+-- files that we don't use anyway
+
+formats['base'] = 'MFBASES' suffixes['base'] = { 'base', 'bas' }
+formats['bib'] = '' suffixes['bib'] = { 'bib' }
+formats['bitmap font'] = '' suffixes['bitmap font'] = { }
+formats['bst'] = '' suffixes['bst'] = { 'bst' }
+formats['cmap files'] = 'CMAPFONTS' suffixes['cmap files'] = { 'cmap' }
+formats['cnf'] = '' suffixes['cnf'] = { 'cnf' }
+formats['cweb'] = '' suffixes['cweb'] = { 'w', 'web', 'ch' }
+formats['dvips config'] = '' suffixes['dvips config'] = { }
+formats['gf'] = '' suffixes['gf'] = { '<resolution>gf' }
+formats['graphic/figure'] = '' suffixes['graphic/figure'] = { 'eps', 'epsi' }
+formats['ist'] = '' suffixes['ist'] = { 'ist' }
+formats['lig files'] = 'LIGFONTS' suffixes['lig files'] = { 'lig' }
+formats['ls-R'] = '' suffixes['ls-R'] = { }
+formats['mem'] = 'MPMEMS' suffixes['mem'] = { 'mem' }
+formats['MetaPost support'] = '' suffixes['MetaPost support'] = { }
+formats['mf'] = 'MFINPUTS' suffixes['mf'] = { 'mf' }
+formats['mft'] = '' suffixes['mft'] = { 'mft' }
+formats['misc fonts'] = '' suffixes['misc fonts'] = { }
+formats['other text files'] = '' suffixes['other text files'] = { }
+formats['other binary files'] = '' suffixes['other binary files'] = { }
+formats['pdftex config'] = 'PDFTEXCONFIG' suffixes['pdftex config'] = { }
+formats['pk'] = '' suffixes['pk'] = { '<resolution>pk' }
+formats['PostScript header'] = 'TEXPSHEADERS' suffixes['PostScript header'] = { 'pro' }
+formats['sfd'] = 'SFDFONTS' suffixes['sfd'] = { 'sfd' }
+formats['TeX system documentation'] = '' suffixes['TeX system documentation'] = { }
+formats['TeX system sources'] = '' suffixes['TeX system sources'] = { }
+formats['Troff fonts'] = '' suffixes['Troff fonts'] = { }
+formats['type42 fonts'] = 'T42FONTS' suffixes['type42 fonts'] = { }
+formats['web'] = '' suffixes['web'] = { 'web', 'ch' }
+formats['web2c files'] = 'WEB2C' suffixes['web2c files'] = { }
+formats['fontconfig files'] = 'FONTCONFIG_PATH' suffixes['fontconfig files'] = { } -- not unique
+
+alternatives['subfont definition files'] = 'sfd'
+
+-- A few accessors, mostly for the command line tool.
+
+function resolvers.suffix_of_format(str)
+ local s = suffixes[str]
+ return s and s[1] or ""
+end
+
+function resolvers.suffixes_of_format(str)
+ return suffixes[str] or { }
+end
+
+-- As we don't register additional suffixes anyway, we can as well
+-- freeze the reverse map here.
+
+for name, suffixlist in next, suffixes do
+ for i=1,#suffixlist do
+ suffixmap[suffixlist[i]] = name
+ end
+end
+
+setmetatable(suffixes, { __newindex = function(suffixes,name,suffixlist)
+ rawset(suffixes,name,suffixlist)
+ suffixes[name] = suffixlist
+ for i=1,#suffixlist do
+ suffixmap[suffixlist[i]] = name
+ end
+end } )
+
+for name, format in next, formats do
+ dangerous[name] = true
+end
+
+-- because vf searching is somewhat dangerous, we want to prevent
+-- too liberal searching esp because we do a lookup on the current
+-- path anyway; only tex (or any) is safe
+
+dangerous.tex = nil
+
+--~ print(table.serialize(dangerous))
+
+-- more helpers
+
+function resolvers.format_of_var(str)
+ return formats[str] or formats[alternatives[str]] or ''
+end
+
+function resolvers.format_of_suffix(str) -- of file
+ return suffixmap[file.extname(str)] or 'tex'
+end
+
+function resolvers.variable_of_format(str)
+ return formats[str] or formats[alternatives[str]] or ''
+end
+
+function resolvers.var_of_format_or_suffix(str)
+ local v = formats[str]
+ if v then
+ return v
+ end
+ v = formats[alternatives[str]]
+ if v then
+ return v
+ end
+ v = suffixmap[fileextname(str)]
+ if v then
+ return formats[v]
+ end
+ return ''
+end
+
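A few representative calls to these accessors, assuming the resolver libraries are loaded; the return values follow directly from the tables defined above:

print(resolvers.suffix_of_format("ofm"))       -- "ofm" (first entry of the suffix list)
print(resolvers.suffixes_of_format("ttf")[3])  -- "dfont"
print(resolvers.format_of_suffix("test.otf"))  -- "otf" (looked up via the frozen suffixmap)
print(resolvers.format_of_var("enc files"))    -- "ENCFONTS" (resolved through the alternatives table)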
diff --git a/tex/context/base/data-exp.lua b/tex/context/base/data-exp.lua
new file mode 100644
index 000000000..785679275
--- /dev/null
+++ b/tex/context/base/data-exp.lua
@@ -0,0 +1,336 @@
+if not modules then modules = { } end modules ['data-exp'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local format, gsub, find, gmatch, lower = string.format, string.gsub, string.find, string.gmatch, string.lower
+local concat, sort = table.concat, table.sort
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local lpegCt, lpegCs, lpegP, lpegC, lpegS = lpeg.Ct, lpeg.Cs, lpeg.P, lpeg.C, lpeg.S
+local type, next = type, next
+
+local ostype = os.type
+local collapse_path = file.collapse_path
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+-- As this bit of code is somewhat special it gets its own module. After
+-- all, when working on the main resolver code, I don't want to scroll
+-- past this every time.
+
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
+-- This approach is better and faster, but it took me a while to realize
+-- that this kind of replacement is cleaner than messy parsing and
+-- fuzzy concatenating. We can probably gain a bit by selectively
+-- applying lpeg, but experiments with parsing this with lpeg proved not to
+-- work that well; the parsing is ok, but dealing with the resulting
+-- table is a pain because we need to work inside-out recursively.
+
+local dummy_path_expr = "^!*unset/*$"
+
+local function do_first(a,b)
+ local t = { }
+ for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
+ return "{" .. concat(t,",") .. "}"
+end
+
+local function do_second(a,b)
+ local t = { }
+ for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
+ return "{" .. concat(t,",") .. "}"
+end
+
+local function do_both(a,b)
+ local t = { }
+ for sa in gmatch(a,"[^,]+") do
+ for sb in gmatch(b,"[^,]+") do
+ t[#t+1] = sa .. sb
+ end
+ end
+ return "{" .. concat(t,",") .. "}"
+end
+
+local function do_three(a,b,c)
+ return a .. b.. c
+end
+
+local stripper_1 = lpeg.stripper("{}@")
+
+local replacer_1 = lpeg.replacer {
+ { ",}", ",@}" },
+ { "{,", "{@," },
+}
+
+local function splitpathexpr(str, newlist, validate)
+ -- no need for further optimization as it is only called a
+ -- few times; we could use lpeg for the sub
+ if trace_expansions then
+ report_resolvers("expanding variable '%s'",str)
+ end
+ local t, ok, done = newlist or { }, false, false
+ str = lpegmatch(replacer_1,str)
+ while true do
+ done = false
+ while true do
+ str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
+ if ok > 0 then done = true else break end
+ end
+ while true do
+ str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
+ if ok > 0 then done = true else break end
+ end
+ while true do
+ str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
+ if ok > 0 then done = true else break end
+ end
+ str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
+ if ok > 0 then done = true end
+ if not done then break end
+ end
+ str = lpegmatch(stripper_1,str)
+ if validate then
+ for s in gmatch(str,"[^,]+") do
+ s = validate(s)
+ if s then t[#t+1] = s end
+ end
+ else
+ for s in gmatch(str,"[^,]+") do
+ t[#t+1] = s
+ end
+ end
+ if trace_expansions then
+ for k=1,#t do
+ report_resolvers("% 4i: %s",k,t[k])
+ end
+ end
+ return t
+end
+
+local function validate(s)
+ local isrecursive = find(s,"//$")
+ s = collapse_path(s)
+ if isrecursive then
+ s = s .. "//"
+ end
+ return s ~= "" and not find(s,dummy_path_expr) and s
+end
+
+resolvers.validated_path = validate -- keeps the trailing //
+
+function resolvers.expanded_path_from_list(pathlist) -- maybe not a list, just a path
+ -- a previous version fed back into pathlist
+ local newlist, ok = { }, false
+ for k=1,#pathlist do
+ if find(pathlist[k],"[{}]") then
+ ok = true
+ break
+ end
+ end
+ if ok then
+ for k=1,#pathlist do
+ splitpathexpr(pathlist[k],newlist,validate)
+ end
+ else
+ for k=1,#pathlist do
+ for p in gmatch(pathlist[k],"([^,]+)") do
+--~ p = collapse_path(p)
+ p = validate(p)
+ if p ~= "" then newlist[#newlist+1] = p end
+ end
+ end
+ end
+ return newlist
+end
+
+-- We also put some cleanup code here.
+
+local cleanup -- used recursively
+
+cleanup = lpeg.replacer {
+ { "!", "" },
+ { "\\", "/" },
+ { "~" , function() return lpegmatch(cleanup,environment.homedir) end },
+}
+
+function resolvers.clean_path(str)
+ return str and lpegmatch(cleanup,str)
+end
+
+-- This one strips quotes and funny tokens.
+
+--~ local stripper = lpegCs(
+--~ lpegpatterns.unspacer * lpegpatterns.unsingle
+--~ + lpegpatterns.undouble * lpegpatterns.unspacer
+--~ )
+
+local expandhome = lpegP("~") / "$HOME" -- environment.homedir
+
+local dodouble = lpegP('"')/"" * (expandhome + (1 - lpegP('"')))^0 * lpegP('"')/""
+local dosingle = lpegP("'")/"" * (expandhome + (1 - lpegP("'")))^0 * lpegP("'")/""
+local dostring = (expandhome + 1 )^0
+
+local stripper = lpegCs(
+ lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
+)
+
+function resolvers.checked_variable(str) -- assumes str is a string
+ return lpegmatch(stripper,str) or str
+end
+
+-- The path splitter:
+
+-- A config (optionally) has the paths split in tables. Internally
+-- we join them and split them after the expansion has taken place. This
+-- is more convenient.
+
+--~ local checkedsplit = string.checkedsplit
+
+local cache = { }
+
+local splitter = lpegCt(lpeg.splitat(lpegS(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+
+local function split_configuration_path(str) -- beware, this can be either a path or a { specification }
+ if str then
+ local found = cache[str]
+ if not found then
+ if str == "" then
+ found = { }
+ else
+ str = gsub(str,"\\","/")
+ local split = lpegmatch(splitter,str)
+ found = { }
+ for i=1,#split do
+ local s = split[i]
+ if not find(s,"^{*unset}*") then
+ found[#found+1] = s
+ end
+ end
+ if trace_expansions then
+ report_resolvers("splitting path specification '%s'",str)
+ for k=1,#found do
+ report_resolvers("% 4i: %s",k,found[k])
+ end
+ end
+ cache[str] = found
+ end
+ end
+ return found
+ end
+end
+
+resolvers.split_configuration_path = split_configuration_path
+
+function resolvers.split_path(str)
+ if type(str) == 'table' then
+ return str
+ else
+ return split_configuration_path(str)
+ end
+end
+
+function resolvers.join_path(str)
+ if type(str) == 'table' then
+ return file.join_path(str)
+ else
+ return str
+ end
+end
+
+-- The next function scans directories and returns a hash where the
+-- entries are either strings or tables.
+
+-- starting with . or .. etc or funny char
+
+--~ local l_forbidden = lpegS("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
+--~ local l_confusing = lpegP(" ")
+--~ local l_character = lpegpatterns.utf8
+--~ local l_dangerous = lpegP(".")
+
+--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpegP(-1)
+--~ ----- l_normal = l_normal * lpegCc(true) + lpegCc(false)
+
+--~ local function test(str)
+--~ print(str,lpegmatch(l_normal,str))
+--~ end
+--~ test("ヒラギノ明朝 Pro W3")
+--~ test("..ヒラギノ明朝 Pro W3")
+--~ test(":ヒラギノ明朝 Pro W3;")
+--~ test("ヒラギノ明朝 /Pro W3;")
+--~ test("ヒラギノ明朝 Pro W3")
+
+local weird = lpegP(".")^1 + lpeg.anywhere(lpegS("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+
+function resolvers.scan_files(specification)
+ if trace_locating then
+ report_resolvers("scanning path '%s'",specification)
+ end
+ local attributes, directory = lfs.attributes, lfs.dir
+ local files = { __path__ = specification }
+ local n, m, r = 0, 0, 0
+ local function scan(spec,path)
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if type(f) == 'string' then
+ files[name] = { f, path }
+ else
+ f[#f+1] = path
+ end
+ else -- probably unique anyway
+ files[name] = path
+ local lower = lower(name)
+ if name ~= lower then
+ files["remap:"..lower] = name
+ r = r + 1
+ end
+ end
+ elseif mode == 'directory' then
+ m = m + 1
+ if path ~= "" then
+ dirs[#dirs+1] = path..'/'..name
+ else
+ dirs[#dirs+1] = name
+ end
+ end
+ end
+ end
+ if #dirs > 0 then
+ sort(dirs)
+ for i=1,#dirs do
+ scan(spec,dirs[i])
+ end
+ end
+ end
+ scan(specification .. '/',"")
+ files.__files__, files.__directories__, files.__remappings__ = n, m, r
+ if trace_locating then
+ report_resolvers("%s files found on %s directories with %s uppercase remappings",n,m,r)
+ end
+ return files
+end
+
+--~ print(table.serialize(resolvers.scan_files("t:/sources")))
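To make the brace expansion concrete: a hedged usage sketch, assuming the resolver libraries are loaded; the input is an example and the expected output follows the patterns listed at the top of this file:

local paths = resolvers.expanded_path_from_list { "a/{b,c}/{d,e}//" }
for i=1,#paths do
    print(paths[i])
end
-- expected output (the trailing // marks recursive searching):
--   a/b/d//
--   a/b/e//
--   a/c/d//
--   a/c/e//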
diff --git a/tex/context/base/data-ini.lua b/tex/context/base/data-ini.lua
new file mode 100644
index 000000000..5805c4301
--- /dev/null
+++ b/tex/context/base/data-ini.lua
@@ -0,0 +1,222 @@
+if not modules then modules = { } end modules ['data-ini'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local concat = table.concat
+local next, type = next, type
+
+local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+local ostype, osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv
+
+-- The code here used to be part of data-res but for convenience
+-- we now split it over multiple files. As this file is now the
+-- starting point we introduce resolvers here.
+
+resolvers = resolvers or { }
+
+-- We don't want the kpse library to kick in. Also, we want to be able to
+-- execute programs. Control over execution is implemented later.
+
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
+
+kpse = { original = kpse }
+
+setmetatable(kpse, {
+ __index = function(kp,name)
+ local r = resolvers[name]
+ if not r then
+ r = function (...)
+ report_resolvers("not supported: %s(%s)",name,concat(...))
+ end
+ rawset(kp,name,r)
+ end
+ return r
+ end
+} )
+
+-- First we check a couple of environment variables. Some might be
+-- set already but we need them later on. We start with the system
+-- font path.
+
+do
+
+ local osfontdir = osgetenv("OSFONTDIR")
+
+ if osfontdir and osfontdir ~= "" then
+ -- ok
+ elseif osname == "windows" then
+ ossetenv("OSFONTDIR","c:/windows/fonts//")
+ elseif osname == "macosx" then
+ ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
+ end
+
+end
+
+-- Next comes the user's home path. We need this as later on we have
+-- to replace ~ with its value.
+
+do
+
+ local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or '~'
+
+ homedir = file.collapse_path(homedir)
+
+ ossetenv("HOME", homedir) -- can be used in unix cnf files
+ ossetenv("USERPROFILE",homedir) -- can be used in windows cnf files
+
+ environment.homedir = homedir
+
+end
+
+-- The following code sets the name of the own binary and its
+-- path. This is fallback code as we have os.selfdir now.
+
+do
+
+ local args = environment.original_arguments or arg -- this needs a cleanup
+
+ local ownbin = environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
+ local ownpath = environment.ownpath or os.selfdir
+
+ ownbin = file.collapse_path(ownbin)
+ ownpath = file.collapse_path(ownpath)
+
+ if not ownpath or ownpath == "" or ownpath == "unset" then
+ ownpath = args[-1] or arg[-1]
+ ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
+ if not ownpath or ownpath == "" then
+ ownpath = args[-0] or arg[-0]
+ ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
+ end
+ local binary = ownbin
+ if not ownpath or ownpath == "" then
+ ownpath = ownpath and filedirname(binary)
+ end
+ if not ownpath or ownpath == "" then
+ if os.binsuffix ~= "" then
+ binary = file.replacesuffix(binary,os.binsuffix)
+ end
+ local path = osgetenv("PATH")
+ if path then
+ for p in gmatch(path,"[^"..io.pathseparator.."]+") do
+ local b = filejoin(p,binary)
+ if lfs.isfile(b) then
+ -- we assume that after changing to the path the currentdir function
+ -- resolves to the real location and use this side effect here; this
+ -- trick is needed because on the mac, installations use symlinks in the
+ -- path instead of real locations
+ local olddir = lfs.currentdir()
+ if lfs.chdir(p) then
+ local pp = lfs.currentdir()
+ if trace_locating and p ~= pp then
+ report_resolvers("following symlink '%s' to '%s'",p,pp)
+ end
+ ownpath = pp
+ lfs.chdir(olddir)
+ else
+ if trace_locating then
+ report_resolvers("unable to check path '%s'",p)
+ end
+ ownpath = p
+ end
+ break
+ end
+ end
+ end
+ end
+ if not ownpath or ownpath == "" then
+ ownpath = "."
+ report_resolvers("forcing fallback ownpath .")
+ elseif trace_locating then
+ report_resolvers("using ownpath '%s'",ownpath)
+ end
+ end
+
+ environment.ownbin = ownbin
+ environment.ownpath = ownpath
+
+end
+
+resolvers.ownpath = environment.ownpath
+
+function resolvers.getownpath()
+ return environment.ownpath
+end
+
+-- The self variables permit us to use only a few (or even no)
+-- environment variables.
+
+do
+
+ local ownpath = environment.ownpath or dir.current()
+
+ if ownpath then
+ ossetenv('SELFAUTOLOC', file.collapse_path(ownpath))
+ ossetenv('SELFAUTODIR', file.collapse_path(ownpath .. "/.."))
+ ossetenv('SELFAUTOPARENT', file.collapse_path(ownpath .. "/../.."))
+ else
+ report_resolvers("error: unable to locate ownpath")
+ os.exit()
+ end
+
+end
+
+-- The running os:
+
+-- todo: check if context sits here; os.platform is more trustworthy
+-- than the bin check as mtx-update runs from another path
+
+local texos = environment.texos or osgetenv("TEXOS")
+local texmfos = environment.texmfos or osgetenv('SELFAUTODIR')
+
+if not texos or texos == "" then
+ texos = file.basename(texmfos)
+end
+
+ossetenv('TEXMFOS', texmfos) -- full bin path
+ossetenv('TEXOS', texos) -- partial bin parent
+ossetenv('SELFAUTOSYSTEM',os.platform) -- bonus
+
+environment.texos = texos
+environment.texmfos = texmfos
+
+-- The current root:
+
+local texroot = environment.texroot or osgetenv("TEXROOT")
+
+if not texroot or texroot == "" then
+ texroot = osgetenv('SELFAUTOPARENT')
+ ossetenv('TEXROOT',texroot)
+end
+
+environment.texroot = file.collapse_path(texroot)
+
+-- Tracing. Todo ...
+
+function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
+ if n then
+ trackers.disable("resolvers.*")
+ trackers.enable("resolvers."..n)
+ end
+end
+
+resolvers.settrace(osgetenv("MTX_INPUT_TRACE"))
+
+-- todo:
+
+-- if profiler and osgetenv("MTX_PROFILE_RUN") == "YES" then
+-- profiler.start("luatex-profile.log")
+-- end
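As a concrete, made-up example of the variables set above: with the binary found in /opt/tex/texmf-linux/bin the chain works out as follows.

-- ownpath        : /opt/tex/texmf-linux/bin
-- SELFAUTOLOC    : /opt/tex/texmf-linux/bin
-- SELFAUTODIR    : /opt/tex/texmf-linux
-- SELFAUTOPARENT : /opt/tex
-- TEXMFOS        : /opt/tex/texmf-linux  (taken from SELFAUTODIR)
-- TEXOS          : texmf-linux           (basename of TEXMFOS)
-- TEXROOT        : /opt/tex              (SELFAUTOPARENT, unless TEXROOT was already set)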
diff --git a/tex/context/base/data-lst.lua b/tex/context/base/data-lst.lua
index 82f675486..a09a9743c 100644
--- a/tex/context/base/data-lst.lua
+++ b/tex/context/base/data-lst.lua
@@ -20,35 +20,30 @@ local function tabstr(str)
end
end
-local function list(list,report)
+local function list(list,report,pattern)
+ pattern = pattern and pattern ~= "" and upper(pattern) or ""
local instance = resolvers.instance
- local pat = upper(pattern or "","")
local report = report or texio.write_nl
local sorted = table.sortedkeys(list)
for i=1,#sorted do
local key = sorted[i]
- if instance.pattern == "" or find(upper(key),pat) then
- if instance.kpseonly then
- if instance.kpsevars[key] then
- report(format("%s=%s",key,tabstr(list[key])))
- end
- else
- report(format('%s %s=%s',(instance.kpsevars[key] and 'K') or 'E',key,tabstr(list[key])))
- end
+ if pattern == "" or find(upper(key),pattern) then
+ report(format('%s %s=%s',instance.origins[key] or "---",key,tabstr(list[key])))
end
end
end
-function resolvers.listers.variables () list(resolvers.instance.variables ) end
-function resolvers.listers.expansions() list(resolvers.instance.expansions) end
+function resolvers.listers.variables (report,pattern) list(resolvers.instance.variables, report,pattern) end
+function resolvers.listers.expansions(report,pattern) list(resolvers.instance.expansions,report,pattern) end
-function resolvers.listers.configurations(report)
+function resolvers.listers.configurations(report,pattern)
+ pattern = pattern and pattern ~= "" and upper(pattern) or ""
local report = report or texio.write_nl
local instance = resolvers.instance
local sorted = table.sortedkeys(instance.kpsevars)
for i=1,#sorted do
local key = sorted[i]
- if not instance.pattern or (instance.pattern=="") or find(key,instance.pattern) then
+ if pattern == "" or find(upper(key),pattern) then
report(format("%s\n",key))
local order = instance.order
for i=1,#order do
diff --git a/tex/context/base/data-lua.lua b/tex/context/base/data-lua.lua
index 988133fbe..d11a066e2 100644
--- a/tex/context/base/data-lua.lua
+++ b/tex/context/base/data-lua.lua
@@ -12,6 +12,8 @@ if not modules then modules = { } end modules ['data-lua'] = {
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
local gsub, insert = string.gsub, table.insert
local unpack = unpack or table.unpack
@@ -40,7 +42,7 @@ local function thepath(...)
local t = { ... } t[#t+1] = "?.lua"
local path = file.join(unpack(t))
if trace_locating then
- logs.report("fileio","! appending '%s' to 'package.path'",path)
+ report_resolvers("! appending '%s' to 'package.path'",path)
end
return path
end
@@ -62,11 +64,11 @@ local function loaded(libpaths,name,simple)
local libpath = libpaths[i]
local resolved = gsub(libpath,"%?",simple)
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
+ report_resolvers("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
end
if resolvers.isreadable.file(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'package.path': '%s'",name,resolved)
+ report_resolvers("! lib '%s' located via 'package.path': '%s'",name,resolved)
end
return loadfile(resolved)
end
@@ -76,17 +78,17 @@ end
package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- mode detail
- logs.report("fileio","! locating '%s'",name)
+ report_resolvers("! locating '%s'",name)
end
for i=1,#libformats do
local format = libformats[i]
local resolved = resolvers.find_file(name,format) or ""
if trace_locating then -- mode detail
- logs.report("fileio","! checking for '%s' using 'libformat path': '%s'",name,format)
+ report_resolvers("! checking for '%s' using 'libformat path': '%s'",name,format)
end
if resolved ~= "" then
if trace_locating then
- logs.report("fileio","! lib '%s' located via environment: '%s'",name,resolved)
+ report_resolvers("! lib '%s' located via environment: '%s'",name,resolved)
end
return loadfile(resolved)
end
@@ -109,11 +111,11 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
local path = paths[p]
local resolved = file.join(path,libname)
if trace_locating then -- mode detail
- logs.report("fileio","! checking for '%s' using 'clibformat path': '%s'",libname,path)
+ report_resolvers("! checking for '%s' using 'clibformat path': '%s'",libname,path)
end
if resolvers.isreadable.file(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'clibformat': '%s'",libname,resolved)
+ report_resolvers("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
end
return package.loadlib(resolved,name)
end
@@ -123,28 +125,28 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
local libpath = clibpaths[i]
local resolved = gsub(libpath,"?",simple)
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
+ report_resolvers("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
end
if resolvers.isreadable.file(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'package.cpath': '%s'",name,resolved)
+ report_resolvers("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
end
return package.loadlib(resolved,name)
end
end
-- just in case the distribution is messed up
if trace_loading then -- more detail
- logs.report("fileio","! checking for '%s' using 'luatexlibs': '%s'",name)
+ report_resolvers("! checking for '%s' using 'luatexlibs': '%s'",name)
end
local resolved = resolvers.find_file(file.basename(name),'luatexlibs') or ""
if resolved ~= "" then
if trace_locating then
- logs.report("fileio","! lib '%s' located by basename via environment: '%s'",name,resolved)
+ report_resolvers("! lib '%s' located by basename via environment: '%s'",name,resolved)
end
return loadfile(resolved)
end
if trace_locating then
- logs.report("fileio",'? unable to locate lib: %s',name)
+ report_resolvers('? unable to locate lib: %s',name)
end
-- return "unable to locate " .. name
end
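The function installed above goes into slot 2 of Lua 5.1's standard package.loaders, so require() consults the resolvers before the stock path and cpath searchers. A generic sketch of a custom loader using only stock Lua 5.1 behaviour; here it is inserted rather than overwritten so the built-in loaders keep working, and find_somewhere is a placeholder for resolvers.find_file:

local function find_somewhere(name)  -- placeholder for resolvers.find_file(name,"lua")
    return nil                       -- pretend nothing was found
end

table.insert(package.loaders, 2, function(name)
    local found = find_somewhere(name .. ".lua")
    if found then
        return loadfile(found)       -- returning a function makes require() call it
    end
    return "\n\tno resolver match for '" .. name .. "'" -- a string is collected into the error message
end)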
diff --git a/tex/context/base/data-met.lua b/tex/context/base/data-met.lua
new file mode 100644
index 000000000..7f2e612e8
--- /dev/null
+++ b/tex/context/base/data-met.lua
@@ -0,0 +1,45 @@
+if not modules then modules = { } end modules ['data-met'] = {
+ version = 1.100,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local find = string.find
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+resolvers.locators = { notfound = { nil } } -- locate databases
+resolvers.hashers = { notfound = { nil } } -- load databases
+resolvers.generators = { notfound = { nil } } -- generate databases
+
+function resolvers.splitmethod(filename)
+ if not filename then
+ return { } -- safeguard
+ elseif type(filename) == "table" then
+ return filename -- already split
+ elseif not find(filename,"://") then
+ return { scheme="file", path = filename, original = filename } -- quick hack
+ else
+ return url.hashed(filename)
+ end
+end
+
+function resolvers.methodhandler(what, filename, filetype) -- ...
+ filename = file.collapse_path(filename)
+ local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
+ local scheme = specification.scheme
+ local resolver = resolvers[what]
+ if resolver[scheme] then
+ if trace_locating then
+ report_resolvers("handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
+ end
+ return resolver[scheme](filename,filetype)
+ else
+ return resolver.tex(filename,filetype) -- todo: specification
+ end
+end
+
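The new data-met.lua dispatches on the scheme of a file specification: plain paths are wrapped in a synthetic 'file' scheme, anything containing '://' goes through url.hashed, and methodhandler then calls the matching entry of the chosen resolver table, falling back to the 'tex' handler. Below is a minimal standalone sketch of that dispatch pattern; the handlers table and its return strings are hypothetical stand-ins, not the actual resolvers code.

-- minimal sketch of the scheme dispatch in data-met.lua; 'handlers' is hypothetical
local find = string.find

local function splitmethod(filename)
    if not filename then
        return { }                                             -- safeguard
    elseif not find(filename,"://") then
        return { scheme = "file", path = filename, original = filename }
    else
        local scheme, rest = filename:match("^([%a][%w+.-]*)://(.*)$")
        return { scheme = scheme or "file", path = rest or filename, original = filename }
    end
end

local handlers = {
    file  = function(spec) return "disk lookup of "  .. spec.path end,
    cache = function(spec) return "cache lookup of " .. spec.path end,
}

local function methodhandler(spec)
    local handler = handlers[spec.scheme] or handlers.file    -- fallback, like the 'tex' default
    return handler(spec)
end

print(methodhandler(splitmethod("texmf-local/web2c")))   -- disk lookup of texmf-local/web2c
print(methodhandler(splitmethod("cache:///data/texmf"))) -- cache lookup of /data/texmf
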
diff --git a/tex/context/base/data-pre.lua b/tex/context/base/data-pre.lua
index 9348f6cd3..391ee2ccd 100644
--- a/tex/context/base/data-pre.lua
+++ b/tex/context/base/data-pre.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['data-res'] = {
+if not modules then modules = { } end modules ['data-pre'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -12,8 +12,10 @@ local upper, lower, gsub = string.upper, string.lower, string.gsub
local prefixes = { }
-prefixes.environment = function(str)
- return resolvers.clean_path(os.getenv(str) or os.getenv(upper(str)) or os.getenv(lower(str)) or "")
+local getenv = resolvers.getenv
+
+prefixes.environment = function(str) -- getenv is case insensitive anyway
+ return resolvers.clean_path(getenv(str) or getenv(upper(str)) or getenv(lower(str)) or "")
end
prefixes.relative = function(str,n)
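With this change the 'environment' prefix resolves through resolvers.getenv, probing the variable name as given and then its upper- and lowercase forms. A self-contained sketch of that lookup order, using os.getenv directly instead of the resolver's cached instance environment (illustrative only):

-- sketch of the environment-prefix lookup order
local upper, lower = string.upper, string.lower

local function environment_prefix(str)
    return os.getenv(str) or os.getenv(upper(str)) or os.getenv(lower(str)) or ""
end

print(environment_prefix("home"))  -- on most unix-like systems this resolves via HOME
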
diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua
index ecef14188..b2a59d753 100644
--- a/tex/context/base/data-res.lua
+++ b/tex/context/base/data-res.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['data-inp'] = {
+if not modules then modules = { } end modules ['data-res'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -6,70 +6,45 @@ if not modules then modules = { } end modules ['data-inp'] = {
license = "see context related readme files",
}
--- After a few years using the code the large luat-inp.lua file
--- has been split up a bit. In the process some functionality was
--- dropped:
---
--- * support for reading lsr files
--- * selective scanning (subtrees)
--- * some public auxiliary functions were made private
---
--- TODO: os.getenv -> os.env[]
--- TODO: instances.[hashes,cnffiles,configurations,522]
--- TODO: check escaping in find etc, too much, too slow
-
--- This lib is multi-purpose and can be loaded again later on so that
--- additional functionality becomes available. We will split this
--- module in components once we're done with prototyping. This is the
--- first code I wrote for LuaTeX, so it needs some cleanup. Before changing
--- something in this module one can best check with Taco or Hans first; there
--- is some nasty trickery going on that relates to traditional kpse support.
-
--- To be considered: hash key lowercase, first entry in table filename
--- (any case), rest paths (so no need for optimization). Or maybe a
--- separate table that matches lowercase names to mixed case when
--- present. In that case the lower() cases can go away. I will do that
--- only when we run into problems with names ... well ... Iwona-Regular.
-
--- Beware, loading and saving is overloaded in luat-tmp!
+-- In practice we will work within one tds tree, but i want to keep
+-- the option open to build tools that look at multiple trees, which is
+-- why we keep the tree specific data in a table. We used to pass the
+-- instance but for practical purposes we now avoid this and use an
+-- instance variable. We always have one instance active (sort of global).
+
+-- todo: cache:/// home:///
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
local next, type = next, type
-local lpegmatch = lpeg.match
-
-local trace_locating, trace_detail, trace_expansions = false, false, false
-
-trackers.register("resolvers.locating", function(v) trace_locating = v end)
-trackers.register("resolvers.details", function(v) trace_detail = v end)
-trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo
-
-if not resolvers then
- resolvers = {
- suffixes = { },
- formats = { },
- dangerous = { },
- suffixmap = { },
- alternatives = { },
- locators = { }, -- locate databases
- hashers = { }, -- load databases
- generators = { }, -- generate databases
- }
-end
-local resolvers = resolvers
+local lpegP, lpegS, lpegR, lpegC, lpegCc, lpegCs, lpegCt = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+
+local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
+local collapse_path = file.collapse_path
-resolvers.locators .notfound = { nil }
-resolvers.hashers .notfound = { nil }
-resolvers.generators.notfound = { nil }
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+local expanded_path_from_list = resolvers.expanded_path_from_list
+local checked_variable = resolvers.checked_variable
+local split_configuration_path = resolvers.split_configuration_path
+
+local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.setenv, os.getenv
resolvers.cacheversion = '1.0.1'
-resolvers.cnfname = 'texmf.cnf'
-resolvers.luaname = 'texmfcnf.lua'
-resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~'
-resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}'
+resolvers.configbanner = ''
+resolvers.homedir = environment.homedir
+resolvers.criticalvars = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
+resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}' -- rubbish path
+resolvers.luacnfname = 'texmfcnf.lua'
+resolvers.luacnfstate = "unknown"
-local dummy_path_expr = "^!*unset/*$"
+local unset_variable = "unset"
local formats = resolvers.formats
local suffixes = resolvers.suffixes
@@ -77,104 +52,12 @@ local dangerous = resolvers.dangerous
local suffixmap = resolvers.suffixmap
local alternatives = resolvers.alternatives
-formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' }
-formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' }
-formats['fmt'] = 'TEXFORMATS' suffixes['fmt'] = { 'fmt' }
-formats['map'] = 'TEXFONTMAPS' suffixes['map'] = { 'map' }
-formats['mp'] = 'MPINPUTS' suffixes['mp'] = { 'mp' }
-formats['ocp'] = 'OCPINPUTS' suffixes['ocp'] = { 'ocp' }
-formats['ofm'] = 'OFMFONTS' suffixes['ofm'] = { 'ofm', 'tfm' }
-formats['otf'] = 'OPENTYPEFONTS' suffixes['otf'] = { 'otf' } -- 'ttf'
-formats['opl'] = 'OPLFONTS' suffixes['opl'] = { 'opl' }
-formats['otp'] = 'OTPINPUTS' suffixes['otp'] = { 'otp' }
-formats['ovf'] = 'OVFFONTS' suffixes['ovf'] = { 'ovf', 'vf' }
-formats['ovp'] = 'OVPFONTS' suffixes['ovp'] = { 'ovp' }
-formats['tex'] = 'TEXINPUTS' suffixes['tex'] = { 'tex' }
-formats['tfm'] = 'TFMFONTS' suffixes['tfm'] = { 'tfm' }
-formats['ttf'] = 'TTFONTS' suffixes['ttf'] = { 'ttf', 'ttc', 'dfont' }
-formats['pfb'] = 'T1FONTS' suffixes['pfb'] = { 'pfb', 'pfa' }
-formats['vf'] = 'VFFONTS' suffixes['vf'] = { 'vf' }
-
-formats['fea'] = 'FONTFEATURES' suffixes['fea'] = { 'fea' }
-formats['cid'] = 'FONTCIDMAPS' suffixes['cid'] = { 'cid', 'cidmap' }
-
-formats ['texmfscripts'] = 'TEXMFSCRIPTS' -- new
-suffixes['texmfscripts'] = { 'rb', 'pl', 'py' } -- 'lua'
-
-formats ['lua'] = 'LUAINPUTS' -- new
-suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' }
-
--- backward compatible ones
-
-alternatives['map files'] = 'map'
-alternatives['enc files'] = 'enc'
-alternatives['cid maps'] = 'cid' -- great, why no cid files
-alternatives['font feature files'] = 'fea' -- and fea files here
-alternatives['opentype fonts'] = 'otf'
-alternatives['truetype fonts'] = 'ttf'
-alternatives['truetype collections'] = 'ttc'
-alternatives['truetype dictionary'] = 'dfont'
-alternatives['type1 fonts'] = 'pfb'
-
--- obscure ones
-
-formats ['misc fonts'] = ''
-suffixes['misc fonts'] = { }
-
-formats ['sfd'] = 'SFDFONTS'
-suffixes ['sfd'] = { 'sfd' }
-alternatives['subfont definition files'] = 'sfd'
-
--- lib paths
-
-formats ['lib'] = 'CLUAINPUTS' -- new (needs checking)
-suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' }
-
--- In practice we will work within one tds tree, but i want to keep
--- the option open to build tools that look at multiple trees, which is
--- why we keep the tree specific data in a table. We used to pass the
--- instance but for practical purposes we now avoid this and use a
--- instance variable.
-
--- here we catch a few new thingies (todo: add these paths to context.tmf)
---
--- FONTFEATURES = .;$TEXMF/fonts/fea//
--- FONTCIDMAPS = .;$TEXMF/fonts/cid//
-
--- we always have one instance active
-
resolvers.instance = resolvers.instance or nil -- the current one (slow access)
-local instance = resolvers.instance or nil -- the current one (fast access)
+local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.newinstance()
- -- store once, freeze and faster (once reset we can best use
- -- instance.environment) maybe better have a register suffix
- -- function
-
- for k, v in next, suffixes do
- for i=1,#v do
- local vi = v[i]
- if vi then
- suffixmap[vi] = k
- end
- end
- end
-
- -- because vf searching is somewhat dangerous, we want to prevent
- -- too liberal searching esp because we do a lookup on the current
- -- path anyway; only tex (or any) is safe
-
- for k, v in next, formats do
- dangerous[k] = true
- end
- dangerous.tex = nil
-
- -- the instance
-
local newinstance = {
- rootpath = '',
- treepath = '',
progname = 'context',
engine = 'luatex',
format = '',
@@ -182,26 +65,19 @@ function resolvers.newinstance()
variables = { },
expansions = { },
files = { },
- remap = { },
- configuration = { },
- setup = { },
+ setups = { },
order = { },
found = { },
foundintrees = { },
- kpsevars = { },
+ origins = { },
hashes = { },
- cnffiles = { },
- luafiles = { },
+ specification = { },
lists = { },
remember = true,
diskcache = true,
renewcache = false,
- scandisk = true,
- cachepath = nil,
loaderror = false,
- sortdata = false,
savelists = true,
- cleanuppaths = true,
allresults = false,
pattern = nil, -- lists
data = { }, -- only for loading
@@ -211,8 +87,8 @@ function resolvers.newinstance()
local ne = newinstance.environment
- for k,v in next, os.env do
- ne[k] = resolvers.bare_variable(v)
+ for k, v in next, osenv do
+ ne[upper(k)] = checked_variable(v)
end
return newinstance
@@ -234,91 +110,70 @@ local function reset_hashes()
instance.found = { }
end
-local function check_configuration() -- not yet ok, no time for debugging now
- local ie, iv = instance.environment, instance.variables
- local function fix(varname,default)
- local proname = varname .. "." .. instance.progname or "crap"
- local p, v = ie[proname], ie[varname] or iv[varname]
- if not ((p and p ~= "") or (v and v ~= "")) then
- iv[varname] = default -- or environment?
- end
- end
- local name = os.name
- if name == "windows" then
- fix("OSFONTDIR", "c:/windows/fonts//")
- elseif name == "macosx" then
- fix("OSFONTDIR", "$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
- else
- -- bad luck
- end
- fix("LUAINPUTS" , ".;$TEXINPUTS;$TEXMFSCRIPTS") -- no progname, hm
- -- this will go away some day
- fix("FONTFEATURES", ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
- fix("FONTCIDMAPS" , ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS")
- --
- fix("LUATEXLIBS" , ".;$TEXMF/luatex/lua//")
-end
-
-function resolvers.bare_variable(str) -- assumes str is a string
- return (gsub(str,"\s*([\"\']?)(.+)%1\s*", "%2"))
-end
-
-function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
- if n then
- trackers.disable("resolvers.*")
- trackers.enable("resolvers."..n)
+function resolvers.setenv(key,value)
+ if instance then
+ instance.environment[key] = value
+ ossetenv(key,value)
end
end
-resolvers.settrace(os.getenv("MTX_INPUT_TRACE"))
-
-function resolvers.osenv(key)
- local ie = instance.environment
- local value = ie[key]
- if value == nil then
- -- local e = os.getenv(key)
- local e = os.env[key]
- if e == nil then
- -- value = "" -- false
- else
- value = resolvers.bare_variable(e)
- end
- ie[key] = value
+function resolvers.getenv(key)
+ local value = instance.environment[key]
+ if value and value ~= "" then
+ return value
+ else
+ local e = osgetenv(key)
+ return e ~= nil and e ~= "" and checked_variable(e) or ""
end
- return value or ""
end
-function resolvers.env(key)
- return instance.environment[key] or resolvers.osenv(key)
-end
-
---
+resolvers.env = resolvers.getenv
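resolvers.setenv now updates both the instance environment and the process environment, while resolvers.getenv (and its resolvers.env alias) prefers the cached instance copy and only then falls back to the OS. A small sketch of that precedence, with a plain table standing in for instance.environment and the checked_variable cleanup omitted (assumptions, not the real API):

-- sketch of the instance-first precedence of setenv/getenv
local instance_env = { }               -- stand-in for instance.environment

local function setenv(key, value)
    instance_env[key] = value          -- cached copy; the real code also calls os.setenv
end

local function getenv(key)
    local value = instance_env[key]
    if value and value ~= "" then
        return value                   -- instance copy wins
    end
    local e = os.getenv(key)
    return e ~= nil and e ~= "" and e or ""
end

setenv("TEXMFCNF", "/opt/texmf/web2c")
print(getenv("TEXMFCNF"))              -- /opt/texmf/web2c, regardless of the OS value
print(getenv("NO_SUCH_VARIABLE"))      -- empty string fallback
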
local function expand_vars(lst) -- simple vars
- local variables, env = instance.variables, resolvers.env
+ local variables, getenv = instance.variables, resolvers.getenv
local function resolve(a)
- return variables[a] or env(a)
+ local va = variables[a] or ""
+ return (va ~= "" and va) or getenv(a) or ""
end
for k=1,#lst do
- lst[k] = gsub(lst[k],"%$([%a%d%_%-]+)",resolve)
+ local var = lst[k]
+ var = gsub(var,"%$([%a%d%_%-]+)",resolve)
+ var = gsub(var,";+",";")
+ var = gsub(var,";[!{}/\\]+;",";")
+--~ var = gsub(var,"~",resolvers.homedir)
+ lst[k] = var
end
end
-local function expanded_var(var) -- simple vars
- local function resolve(a)
- return instance.variables[a] or resolvers.env(a)
+local function resolve(key)
+ local value = instance.variables[key]
+ if value and value ~= "" then
+ return value
end
- return (gsub(var,"%$([%a%d%_%-]+)",resolve))
+ local value = instance.environment[key]
+ if value and value ~= "" then
+ return value
+ end
+ local e = osgetenv(key)
+ return e ~= nil and e ~= "" and checked_variable(e) or ""
+end
+
+local function expanded_var(var) -- simple vars
+ var = gsub(var,"%$([%a%d%_%-]+)",resolve)
+ var = gsub(var,";+",";")
+ var = gsub(var,";[!{}/\\]+;",";")
+--~ var = gsub(var,"~",resolvers.homedir)
+ return var
end
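Both expand_vars and expanded_var now substitute $NAME references and then clean up the result by collapsing repeated semicolons and dropping leftover separator debris. A standalone sketch of that substitute-then-clean step, with a hypothetical variables table:

-- sketch of the substitute-then-clean expansion step; 'variables' holds made-up values
local gsub = string.gsub

local variables = { TEXMF = "/opt/texmf", EXTRA = "" }

local function expanded(var)
    var = gsub(var, "%$([%a%d%_%-]+)", function(a)
        local va = variables[a] or ""
        return (va ~= "" and va) or os.getenv(a) or ""
    end)
    var = gsub(var, ";+", ";")             -- collapse empty list entries
    var = gsub(var, ";[!{}/\\]+;", ";")    -- drop leftover separator debris
    return var
end

print(expanded(".;$TEXMF/tex//;$EXTRA;$EXTRA"))  -- .;/opt/texmf/tex//;
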
local function entry(entries,name)
- if name and (name ~= "") then
+ if name and name ~= "" then
name = gsub(name,'%$','')
local result = entries[name..'.'..instance.progname] or entries[name]
if result then
return result
else
- result = resolvers.env(name)
+ result = resolvers.getenv(name)
if result then
instance.variables[name] = result
resolvers.expand_variables()
@@ -338,438 +193,147 @@ local function is_entry(entries,name)
end
end
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
--- this one is better and faster, but it took me a while to realize
--- that this kind of replacement is cleaner than messy parsing and
--- fuzzy concatenating we can probably gain a bit with selectively
--- applying lpeg, but experiments with lpeg parsing this proved not to
--- work that well; the parsing is ok, but dealing with the resulting
--- table is a pain because we need to work inside-out recursively
-
-local function do_first(a,b)
- local t = { }
- for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_second(a,b)
- local t = { }
- for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_both(a,b)
- local t = { }
- for sa in gmatch(a,"[^,]+") do
- for sb in gmatch(b,"[^,]+") do
- t[#t+1] = sa .. sb
- end
- end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_three(a,b,c)
- return a .. b.. c
-end
-
-local function splitpathexpr(str, t, validate)
- -- no need for further optimization as it is only called a
- -- few times, we can use lpeg for the sub
- if trace_expansions then
- logs.report("fileio","expanding variable '%s'",str)
- end
- t = t or { }
- str = gsub(str,",}",",@}")
- str = gsub(str,"{,","{@,")
- -- str = "@" .. str .. "@"
- local ok, done
- while true do
- done = false
- while true do
- str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
- if ok > 0 then done = true else break end
- end
- str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
- if ok > 0 then done = true end
- if not done then break end
- end
- str = gsub(str,"[{}]", "")
- str = gsub(str,"@","")
- if validate then
- for s in gmatch(str,"[^,]+") do
- s = validate(s)
- if s then t[#t+1] = s end
- end
- else
- for s in gmatch(str,"[^,]+") do
- t[#t+1] = s
- end
- end
- if trace_expansions then
- for k=1,#t do
- logs.report("fileio","% 4i: %s",k,t[k])
- end
- end
- return t
-end
-
-local function expanded_path_from_list(pathlist) -- maybe not a list, just a path
- -- a previous version fed back into pathlist
- local newlist, ok = { }, false
- for k=1,#pathlist do
- if find(pathlist[k],"[{}]") then
- ok = true
- break
- end
- end
- if ok then
- local function validate(s)
- s = file.collapse_path(s)
- return s ~= "" and not find(s,dummy_path_expr) and s
- end
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- else
- for k=1,#pathlist do
- for p in gmatch(pathlist[k],"([^,]+)") do
- p = file.collapse_path(p)
- if p ~= "" then newlist[#newlist+1] = p end
- end
- end
- end
- return newlist
-end
-
--- we follow a rather traditional approach:
---
--- (1) texmf.cnf given in TEXMFCNF
--- (2) texmf.cnf searched in default variable
---
--- also we now follow the stupid route: if not set then just assume *one*
--- cnf file under texmf (i.e. distribution)
-
-local args = environment and environment.original_arguments or arg -- this needs a cleanup
-
-resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
-resolvers.ownbin = gsub(resolvers.ownbin,"\\","/")
-
-function resolvers.getownpath()
- local ownpath = resolvers.ownpath or os.selfdir
- if not ownpath or ownpath == "" or ownpath == "unset" then
- ownpath = args[-1] or arg[-1]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- if not ownpath or ownpath == "" then
- ownpath = args[-0] or arg[-0]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- end
- local binary = resolvers.ownbin
- if not ownpath or ownpath == "" then
- ownpath = ownpath and file.dirname(binary)
- end
- if not ownpath or ownpath == "" then
- if os.binsuffix ~= "" then
- binary = file.replacesuffix(binary,os.binsuffix)
- end
- for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
- local b = file.join(p,binary)
- if lfs.isfile(b) then
- -- we assume that after changing to the path the currentdir function
- -- resolves to the real location and use this side effect here; this
- -- trick is needed because on the mac installations use symlinks in the
- -- path instead of real locations
- local olddir = lfs.currentdir()
- if lfs.chdir(p) then
- local pp = lfs.currentdir()
- if trace_locating and p ~= pp then
- logs.report("fileio","following symlink '%s' to '%s'",p,pp)
- end
- ownpath = pp
- lfs.chdir(olddir)
- else
- if trace_locating then
- logs.report("fileio","unable to check path '%s'",p)
- end
- ownpath = p
- end
- break
- end
- end
- end
- if not ownpath or ownpath == "" then
- ownpath = "."
- logs.report("fileio","forcing fallback ownpath .")
- elseif trace_locating then
- logs.report("fileio","using ownpath '%s'",ownpath)
- end
- end
- resolvers.ownpath = ownpath
- function resolvers.getownpath()
- return resolvers.ownpath
- end
- return ownpath
-end
-
-local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" }
-
-local function identify_own()
- local ownpath = resolvers.getownpath() or dir.current()
- local ie = instance.environment
- if ownpath then
- if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end
- if resolvers.env('SELFAUTODIR') == "" then os.env['SELFAUTODIR'] = file.collapse_path(ownpath .. "/..") end
- if resolvers.env('SELFAUTOPARENT') == "" then os.env['SELFAUTOPARENT'] = file.collapse_path(ownpath .. "/../..") end
- else
- logs.report("fileio","error: unable to locate ownpath")
- os.exit()
- end
- if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end
- if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end
- if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end
+function resolvers.report_critical_variables()
if trace_locating then
- for i=1,#own_places do
- local v = own_places[i]
- logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown")
+ for i=1,#resolvers.criticalvars do
+ local v = resolvers.criticalvars[i]
+ report_resolvers("variable '%s' set to '%s'",v,resolvers.getenv(v) or "unknown")
end
+ report_resolvers()
end
- identify_own = function() end
+ resolvers.report_critical_variables = function() end
end
-function resolvers.identify_cnf()
- if #instance.cnffiles == 0 then
- -- fallback
- identify_own()
- -- the real search
+local function identify_configuration_files()
+ local specification = instance.specification
+ if #specification == 0 then
+ local cnfspec = resolvers.getenv('TEXMFCNF')
+ if cnfspec == "" then
+ cnfspec = resolvers.luacnfspec
+ resolvers.luacnfstate = "default"
+ else
+ resolvers.luacnfstate = "environment"
+ end
+ resolvers.report_critical_variables()
resolvers.expand_variables()
- local t = resolvers.split_path(resolvers.env('TEXMFCNF'))
- t = expanded_path_from_list(t)
- expand_vars(t) -- redundant
- local function locate(filename,list)
- for i=1,#t do
- local ti = t[i]
- local texmfcnf = file.collapse_path(file.join(ti,filename))
- if lfs.isfile(texmfcnf) then
- list[#list+1] = texmfcnf
- end
+ local cnfpaths = expanded_path_from_list(resolvers.split_path(cnfspec))
+ expand_vars(cnfpaths) --- hm
+ local luacnfname = resolvers.luacnfname
+ for i=1,#cnfpaths do
+ local filename = collapse_path(filejoin(cnfpaths[i],luacnfname))
+ if lfs.isfile(filename) then
+ specification[#specification+1] = filename
end
end
- locate(resolvers.luaname,instance.luafiles)
- locate(resolvers.cnfname,instance.cnffiles)
end
end
-local function load_cnf_file(fname)
- fname = resolvers.clean_path(fname)
- local lname = file.replacesuffix(fname,'lua')
- if lfs.isfile(lname) then
- local dname = file.dirname(fname) -- fname ?
- if not instance.configuration[dname] then
- resolvers.load_data(dname,'configuration',lname and file.basename(lname))
- instance.order[#instance.order+1] = instance.configuration[dname]
- end
- else
- f = io.open(fname)
- if f then
- if trace_locating then
- logs.report("fileio","loading configuration file %s", fname)
- end
- local line, data, n, k, v
- local dname = file.dirname(fname)
- if not instance.configuration[dname] then
- instance.configuration[dname] = { }
- instance.order[#instance.order+1] = instance.configuration[dname]
- end
- local data = instance.configuration[dname]
- while true do
- local line, n = f:read(), 0
- if line then
- while true do -- join lines
- line, n = gsub(line,"\\%s*$", "")
- if n > 0 then
- line = line .. f:read()
- else
- break
+local function load_configuration_files()
+ local specification = instance.specification
+ if #specification > 0 then
+ local luacnfname = resolvers.luacnfname
+ for i=1,#specification do
+ local filename = specification[i]
+ local pathname = filedirname(filename)
+ local filename = filejoin(pathname,luacnfname)
+ local blob = loadfile(filename)
+ if blob then
+ local data = blob()
+ data = data and data.content
+ local setups = instance.setups
+ if data then
+ if trace_locating then
+ report_resolvers("loading configuration file '%s'",filename)
+ report_resolvers()
+ end
+ -- flattening is easier to deal with as we need to collapse
+ local t = { }
+ for k, v in next, data do -- v = progname
+ if v ~= unset_variable then
+ local kind = type(v)
+ if kind == "string" then
+ t[k] = v
+ elseif kind == "table" then
+ -- this operates on the table directly
+ setters.initialize(filename,k,v)
+ -- this doesn't (maybe metatables some day)
+ for kk, vv in next, v do -- vv = variable
+ if vv ~= unset_variable then
+ if type(vv) == "string" then
+ t[kk.."."..k] = vv
+ end
+ end
+ end
+ else
+ -- report_resolvers("strange key '%s' in configuration file '%s'",k,filename)
+ end
end
end
- if not find(line,"^[%%#]") then
- local l = gsub(line,"%s*%%.*$","")
- local k, v = match(l,"%s*(.-)%s*=%s*(.-)%s*$")
- if k and v and not data[k] then
- v = gsub(v,"[%%#].*",'')
- data[k] = gsub(v,"~","$HOME")
- instance.kpsevars[k] = true
+ setups[pathname] = t
+
+ if resolvers.luacnfstate == "default" then
+ -- the following code is not tested
+ local cnfspec = t["TEXMFCNF"]
+ if cnfspec then
+ -- we push the value into the main environment (osenv) so
+ -- that it takes precedence over the default one and therefore
+ -- also over following definitions
+ resolvers.setenv('TEXMFCNF',cnfspec)
+ -- we now identify and load the specified configuration files
+ instance.specification = { }
+ identify_configuration_files()
+ load_configuration_files()
+ -- we prevent further overload of the configuration variable
+ resolvers.luacnfstate = "configuration"
+ -- we quit the outer loop
+ break
end
end
+
else
- break
+ if trace_locating then
+ report_resolvers("skipping configuration file '%s'",filename)
+ end
+ setups[pathname] = { }
+ instance.loaderror = true
end
+ elseif trace_locating then
+ report_resolvers("skipping configuration file '%s'",filename)
+ end
+ instance.order[#instance.order+1] = instance.setups[pathname]
+ if instance.loaderror then
+ break
end
- f:close()
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'", fname)
end
+ elseif trace_locating then
+ report_resolvers("warning: no lua configuration files found")
end
end
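load_configuration_files flattens each texmfcnf.lua 'content' table: top-level strings stay as plain variables, while per-progname subtables become keys of the form "VARIABLE.progname" (unset entries are skipped, and table values are also handed to setters.initialize). A sketch of that flattening with made-up configuration data:

-- sketch: flattening a texmfcnf.lua-style content table into "VARIABLE.progname" keys
local data = {
    TEXMFCACHE = "$SELFAUTOPARENT/texmf-cache",
    context = {                                   -- per-progname subtable
        TEXINPUTS = ".;$TEXMF/tex/{context,generic}//",
    },
}

local unset_variable = "unset"
local flattened = { }
for k, v in next, data do
    if v ~= unset_variable then
        if type(v) == "string" then
            flattened[k] = v                       -- plain variable
        elseif type(v) == "table" then
            for kk, vv in next, v do
                if type(vv) == "string" and vv ~= unset_variable then
                    flattened[kk .. "." .. k] = vv -- e.g. "TEXINPUTS.context"
                end
            end
        end
    end
end

for k, v in next, flattened do print(k, v) end
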
-local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared)
- local order = instance.order
+local function collapse_configuration_data() -- potential optimization: pass start index (setup and configuration are shared)
+ local order, variables, environment, origins = instance.order, instance.variables, instance.environment, instance.origins
for i=1,#order do
local c = order[i]
for k,v in next, c do
- if not instance.variables[k] then
- if instance.environment[k] then
- instance.variables[k] = instance.environment[k]
+ if variables[k] then
+ -- okay
+ else
+ local ek = environment[k]
+ if ek and ek ~= "" then
+ variables[k], origins[k] = ek, "env"
else
- instance.kpsevars[k] = true
- instance.variables[k] = resolvers.bare_variable(v)
+ local bv = checked_variable(v)
+ variables[k], origins[k] = bv, "cnf"
end
end
end
end
end
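collapse_configuration_data then merges the loaded setups in order: the first definition of a variable wins, an existing environment value takes precedence over the cnf value, and the chosen origin is recorded per key. A self-contained sketch with hypothetical data:

-- sketch: merging loaded setups, with the environment taking precedence
local variables, origins = { }, { }
local environment = { TEXMF = "/from/environment" }
local order = {                                        -- setups in loading order, first hit wins
    { TEXMF = "/from/cnf", TEXMFCACHE = "$HOME/.cache/context" },
}

for i = 1, #order do
    for k, v in next, order[i] do
        if not variables[k] then
            local ek = environment[k]
            if ek and ek ~= "" then
                variables[k], origins[k] = ek, "env"   -- environment beats the cnf value
            else
                variables[k], origins[k] = v, "cnf"
            end
        end
    end
end

for k, v in next, variables do print(k, v, origins[k]) end
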
-function resolvers.load_cnf()
- local function loadoldconfigdata()
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- load_cnf_file(cnffiles[i])
- end
- end
- -- instance.cnffiles contain complete names now !
- -- we still use a funny mix of cnf and new but soon
- -- we will switch to lua exclusively as we only use
- -- the file to collect the tree roots
- if #instance.cnffiles == 0 then
- if trace_locating then
- logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)")
- end
- else
- local cnffiles = instance.cnffiles
- instance.rootpath = cnffiles[1]
- for k=1,#cnffiles do
- instance.cnffiles[k] = file.collapse_path(cnffiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- if instance.diskcache and not instance.renewcache then
- resolvers.loadoldconfig(instance.cnffiles)
- if instance.loaderror then
- loadoldconfigdata()
- resolvers.saveoldconfig()
- end
- else
- loadoldconfigdata()
- if instance.renewcache then
- resolvers.saveoldconfig()
- end
- end
- collapse_cnf_data()
- end
- check_configuration()
-end
-
-function resolvers.load_lua()
- if #instance.luafiles == 0 then
- -- yet harmless
- else
- instance.rootpath = instance.luafiles[1]
- local luafiles = instance.luafiles
- for k=1,#luafiles do
- instance.luafiles[k] = file.collapse_path(luafiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- resolvers.loadnewconfig()
- collapse_cnf_data()
- end
- check_configuration()
-end
-
-- database loading
-function resolvers.load_hash()
- resolvers.locatelists()
- if instance.diskcache and not instance.renewcache then
- resolvers.loadfiles()
- if instance.loaderror then
- resolvers.loadlists()
- resolvers.savefiles()
- end
- else
- resolvers.loadlists()
- if instance.renewcache then
- resolvers.savefiles()
- end
- end
-end
-
-function resolvers.append_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' appended",tag)
- end
- insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } )
-end
-
-function resolvers.prepend_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' prepended",tag)
- end
- insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } )
-end
-
-function resolvers.extend_texmf_var(specification) -- crap, we could better prepend the hash
--- local t = resolvers.expanded_path_list('TEXMF') -- full expansion
- local t = resolvers.split_path(resolvers.env('TEXMF'))
- insert(t,1,specification)
- local newspec = concat(t,";")
- if instance.environment["TEXMF"] then
- instance.environment["TEXMF"] = newspec
- elseif instance.variables["TEXMF"] then
- instance.variables["TEXMF"] = newspec
- else
- -- weird
- end
- resolvers.expand_variables()
- reset_hashes()
-end
-
-- locators
-function resolvers.locatelists()
- local texmfpaths = resolvers.clean_path_list('TEXMF')
- for i=1,#texmfpaths do
- local path = texmfpaths[i]
- if trace_locating then
- logs.report("fileio","locating list of '%s'",path)
- end
- resolvers.locatedatabase(file.collapse_path(path))
- end
-end
-
function resolvers.locatedatabase(specification)
return resolvers.methodhandler('locators', specification)
end
@@ -777,11 +341,11 @@ end
function resolvers.locators.tex(specification)
if specification and specification ~= '' and lfs.isdir(specification) then
if trace_locating then
- logs.report("fileio","tex locator '%s' found",specification)
+ report_resolvers("tex locator '%s' found",specification)
end
- resolvers.append_hash('file',specification,filename)
+ resolvers.append_hash('file',specification,filename,true) -- cache
elseif trace_locating then
- logs.report("fileio","tex locator '%s' not found",specification)
+ report_resolvers("tex locator '%s' not found",specification)
end
end
@@ -791,9 +355,8 @@ function resolvers.hashdatabase(tag,name)
return resolvers.methodhandler('hashers',tag,name)
end
-function resolvers.loadfiles()
- instance.loaderror = false
- instance.files = { }
+local function load_file_databases()
+ instance.loaderror, instance.files = false, { }
if not instance.renewcache then
local hashes = instance.hashes
for k=1,#hashes do
@@ -804,194 +367,134 @@ function resolvers.loadfiles()
end
end
-function resolvers.hashers.tex(tag,name)
- resolvers.load_data(tag,'files')
-end
-
--- generators:
-
-function resolvers.loadlists()
- local hashes = instance.hashes
- for i=1,#hashes do
- resolvers.generatedatabase(hashes[i].tag)
+function resolvers.hashers.tex(tag,name) -- used where?
+ local content = caches.loadcontent(tag,'files')
+ if content then
+ instance.files[tag] = content
+ else
+ instance.files[tag] = { }
+ instance.loaderror = true
end
end
-function resolvers.generatedatabase(specification)
- return resolvers.methodhandler('generators', specification)
-end
-
--- starting with . or .. etc or funny char
-
-local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-
---~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
---~ local l_confusing = lpeg.P(" ")
---~ local l_character = lpeg.patterns.utf8
---~ local l_dangerous = lpeg.P(".")
-
---~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1)
---~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false)
-
---~ local function test(str)
---~ print(str,lpeg.match(l_normal,str))
---~ end
---~ test("ヒラギノ明朝 Pro W3")
---~ test("..ヒラギノ明朝 Pro W3")
---~ test(":ヒラギノ明朝 Pro W3;")
---~ test("ヒラギノ明朝 /Pro W3;")
---~ test("ヒラギノ明朝 Pro W3")
-
-function resolvers.generators.tex(specification)
- local tag = specification
- if trace_locating then
- logs.report("fileio","scanning path '%s'",specification)
- end
- instance.files[tag] = { }
- local files = instance.files[tag]
- local n, m, r = 0, 0, 0
- local spec = specification .. '/'
- local attributes = lfs.attributes
- local directory = lfs.dir
- local function action(path)
- local full
- if path then
- full = spec .. path .. '/'
- else
- full = spec
- end
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- -- if lpegmatch(l_normal,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- if path then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
+local function locate_file_databases()
+ -- todo: cache:// and tree:// (runtime)
+ local texmfpaths = resolvers.expanded_path_list('TEXMF')
+ for i=1,#texmfpaths do
+ local path = collapse_path(texmfpaths[i])
+ local stripped = gsub(path,"^!!","")
+ local runtime = stripped == path
+ path = resolvers.clean_path(path)
+ if stripped ~= "" then
+ if lfs.isdir(path) then
+ local spec = resolvers.splitmethod(stripped)
+ if spec.scheme == "cache" then
+ stripped = spec.path
+ elseif runtime and (spec.noscheme or spec.scheme == "file") then
+ stripped = "tree:///" .. stripped
+ end
+ if trace_locating then
+ if runtime then
+ report_resolvers("locating list of '%s' (runtime)",path)
+ else
+ report_resolvers("locating list of '%s' (cached)",path)
end
- elseif mode == 'directory' then
- m = m + 1
- if path then
- action(path..'/'..name)
+ end
+ resolvers.locatedatabase(stripped) -- nothing done with result
+ else
+ if trace_locating then
+ if runtime then
+ report_resolvers("skipping list of '%s' (runtime)",path)
else
- action(name)
+ report_resolvers("skipping list of '%s' (cached)",path)
end
end
end
end
end
- action()
if trace_locating then
- logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r)
+ report_resolvers()
end
end
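locate_file_databases treats a leading '!!' on a TEXMF entry as cached-only, while entries without it are scanned at runtime and rewritten to a tree:/// specification (cache:// specifications keep their own path). A simplified sketch of that classification, leaving the cache:// branch out:

-- simplified sketch of the '!!' classification in locate_file_databases
local gsub = string.gsub

local function classify(path)
    local stripped = gsub(path, "^!!", "")
    local runtime  = stripped == path            -- no '!!' prefix: scan at runtime
    if runtime then
        return "tree:///" .. stripped            -- runtime tree, not written to the cache
    else
        return stripped                          -- '!!' entry: rely on the cached database
    end
end

print(classify("!!/opt/texmf"))      -- /opt/texmf (cached)
print(classify("texmf-project"))     -- tree:///texmf-project (runtime)
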
--- savers, todo
-
-function resolvers.savefiles()
- resolvers.save_data('files')
+local function generate_file_databases()
+ local hashes = instance.hashes
+ for i=1,#hashes do
+ resolvers.methodhandler('generators',hashes[i].tag)
+ end
+ if trace_locating then
+ report_resolvers()
+ end
end
--- A config (optionally) has the paths split in tables. Internally
--- we join them and split them after the expansion has taken place. This
--- is more convenient.
-
---~ local checkedsplit = string.checkedsplit
-
-local cache = { }
-
-local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;")))
-
-local function split_kpse_path(str) -- beware, this can be either a path or a {specification}
- local found = cache[str]
- if not found then
- if str == "" then
- found = { }
- else
- str = gsub(str,"\\","/")
---~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator)
-local split = lpegmatch(splitter,str)
- found = { }
- for i=1,#split do
- local s = split[i]
- if not find(s,"^{*unset}*") then
- found[#found+1] = s
- end
- end
- if trace_expansions then
- logs.report("fileio","splitting path specification '%s'",str)
- for k=1,#found do
- logs.report("fileio","% 4i: %s",k,found[k])
- end
- end
- cache[str] = found
+local function save_file_databases() -- will become cachers
+ for i=1,#instance.hashes do
+ local hash = instance.hashes[i]
+ local cachename = hash.tag
+ if hash.cache then
+ local content = instance.files[cachename]
+ caches.collapsecontent(content)
+ caches.savecontent(cachename,"files",content)
+ elseif trace_locating then
+ report_resolvers("not saving runtime tree '%s'",cachename)
end
end
- return found
end
-resolvers.split_kpse_path = split_kpse_path
-
-function resolvers.splitconfig()
- for i=1,#instance do
- local c = instance[i]
- for k,v in next, c do
- if type(v) == 'string' then
- local t = split_kpse_path(v)
- if #t > 1 then
- c[k] = t
- end
- end
+local function load_databases()
+ locate_file_databases()
+ if instance.diskcache and not instance.renewcache then
+ load_file_databases()
+ if instance.loaderror then
+ generate_file_databases()
+ save_file_databases()
+ end
+ else
+ generate_file_databases()
+ if instance.renewcache then
+ save_file_databases()
end
end
end
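load_databases ties these pieces together: with a disk cache enabled it first tries the cached file databases and only regenerates and saves them when loading failed; otherwise it always regenerates and saves only when renewcache is set. A runnable sketch of that decision flow, with stub functions standing in for the real load/generate/save steps:

-- sketch of the cache-or-regenerate decision; the three functions are stubs
local diskcache, renewcache, loaderror = true, false, false

local function load_file_databases()     loaderror = true end   -- pretend the cache was stale
local function generate_file_databases() print("scanning TEXMF trees") end
local function save_file_databases()     print("saving file databases") end

if diskcache and not renewcache then
    load_file_databases()
    if loaderror then                 -- cache missing or stale: rebuild and save
        generate_file_databases()
        save_file_databases()
    end
else
    generate_file_databases()
    if renewcache then
        save_file_databases()
    end
end
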
-function resolvers.joinconfig()
- local order = instance.order
- for i=1,#order do
- local c = order[i]
- for k,v in next, c do -- indexed?
- if type(v) == 'table' then
- c[k] = file.join_path(v)
- end
- end
+function resolvers.append_hash(type,tag,name,cache)
+ if trace_locating then
+ report_resolvers("hash '%s' appended",tag)
end
+ insert(instance.hashes, { type = type, tag = tag, name = name, cache = cache } )
end
-function resolvers.split_path(str)
- if type(str) == 'table' then
- return str
- else
- return split_kpse_path(str)
+function resolvers.prepend_hash(type,tag,name,cache)
+ if trace_locating then
+ report_resolvers("hash '%s' prepended",tag)
end
+ insert(instance.hashes, 1, { type = type, tag = tag, name = name, cache = cache } )
end
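append_hash and prepend_hash now carry a fourth cache flag, so save_file_databases later knows which trees may be written to the cache; the records also use plain keys instead of the quoted ['type'] style. A sketch of the resulting entries in instance.hashes, with made-up tags and names:

-- sketch of the records stored in instance.hashes
local insert = table.insert
local hashes = { }

local function append_hash(type, tag, name, cache)
    insert(hashes, { type = type, tag = tag, name = name, cache = cache })
end

local function prepend_hash(type, tag, name, cache)
    insert(hashes, 1, { type = type, tag = tag, name = name, cache = cache })
end

append_hash ("file", "/opt/texmf",       "texmf",      true)   -- cacheable tree
prepend_hash("file", "/home/user/texmf", "texmf-home", false)  -- runtime-only tree

for i = 1, #hashes do
    print(hashes[i].tag, hashes[i].cache)
end
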
-function resolvers.join_path(str)
- if type(str) == 'table' then
- return file.join_path(str)
+function resolvers.extend_texmf_var(specification) -- crap, we could better prepend the hash
+-- local t = resolvers.expanded_path_list('TEXMF') -- full expansion
+ local t = resolvers.split_path(resolvers.getenv('TEXMF'))
+ insert(t,1,specification)
+ local newspec = concat(t,";")
+ if instance.environment["TEXMF"] then
+ instance.environment["TEXMF"] = newspec
+ elseif instance.variables["TEXMF"] then
+ instance.variables["TEXMF"] = newspec
else
- return str
+ -- weird
end
+ resolvers.expand_variables()
+ reset_hashes()
+end
+
+function resolvers.generators.tex(specification,tag)
+ instance.files[tag or specification] = resolvers.scan_files(specification)
end
function resolvers.splitexpansions()
local ie = instance.expansions
for k,v in next, ie do
- local t, h, p = { }, { }, split_kpse_path(v)
+ local t, h, p = { }, { }, split_configuration_path(v)
for kk=1,#p do
local vv = p[kk]
if vv ~= "" and not h[vv] then
@@ -1009,222 +512,22 @@ end
-- end of split/join code
-function resolvers.saveoldconfig()
- resolvers.splitconfig()
- resolvers.save_data('configuration')
- resolvers.joinconfig()
-end
-
-resolvers.configbanner = [[
--- This is a Luatex configuration file created by 'luatools.lua' or
--- 'luatex.exe' directly. For comment, suggestions and questions you can
--- contact the ConTeXt Development Team. This configuration file is
--- not copyrighted. [HH & TH]
-]]
-
-function resolvers.serialize(files)
- -- This version is somewhat optimized for the kind of
- -- tables that we deal with, so it's much faster than
- -- the generic serializer. This makes sense because
- -- luatools and mtxtools are called frequently. Okay,
- -- we pay a small price for properly tabbed tables.
- local t = { }
- local function dump(k,v,m) -- could be moved inline
- if type(v) == 'string' then
- return m .. "['" .. k .. "']='" .. v .. "',"
- elseif #v == 1 then
- return m .. "['" .. k .. "']='" .. v[1] .. "',"
- else
- return m .. "['" .. k .. "']={'" .. concat(v,"','").. "'},"
- end
- end
- t[#t+1] = "return {"
- if instance.sortdata then
- local sortedfiles = sortedkeys(files)
- for i=1,#sortedfiles do
- local k = sortedfiles[i]
- local fk = files[k]
- if type(fk) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- local sortedfk = sortedkeys(fk)
- for j=1,#sortedfk do
- local kk = sortedfk[j]
- t[#t+1] = dump(kk,fk[kk],"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,fk,"\t")
- end
- end
- else
- for k, v in next, files do
- if type(v) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- for kk,vv in next, v do
- t[#t+1] = dump(kk,vv,"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,v,"\t")
- end
- end
- end
- t[#t+1] = "}"
- return concat(t,"\n")
-end
-
-local data_state = { }
+-- we used to have 'files' and 'configurations' so therefore the following
+-- shared function
function resolvers.data_state()
- return data_state or { }
-end
-
-function resolvers.save_data(dataname, makename) -- untested without cache overload
- for cachename, files in next, instance[dataname] do
- local name = (makename or file.join)(cachename,dataname)
- local luaname, lucname = name .. ".lua", name .. ".luc"
- if trace_locating then
- logs.report("fileio","preparing '%s' for '%s'",dataname,cachename)
- end
- for k, v in next, files do
- if type(v) == "table" and #v == 1 then
- files[k] = v[1]
- end
- end
- local data = {
- type = dataname,
- root = cachename,
- version = resolvers.cacheversion,
- date = os.date("%Y-%m-%d"),
- time = os.date("%H:%M:%S"),
- content = files,
- uuid = os.uuid(),
- }
- local ok = io.savedata(luaname,resolvers.serialize(data))
- if ok then
- if trace_locating then
- logs.report("fileio","'%s' saved in '%s'",dataname,luaname)
- end
- if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip
- if trace_locating then
- logs.report("fileio","'%s' compiled to '%s'",dataname,lucname)
- end
- else
- if trace_locating then
- logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname)
- end
- os.remove(lucname)
- end
- elseif trace_locating then
- logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname)
- end
- end
-end
-
-function resolvers.load_data(pathname,dataname,filename,makename) -- untested without cache overload
- filename = ((not filename or (filename == "")) and dataname) or filename
- filename = (makename and makename(dataname,filename)) or file.join(pathname,filename)
- local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
- if blob then
- local data = blob()
- if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then
- data_state[#data_state+1] = data.uuid
- if trace_locating then
- logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = data.content
- else
- if trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
-end
-
--- some day i'll use the nested approach, but not yet (actually we even drop
--- engine/progname support since we have only luatex now)
---
--- first texmfcnf.lua files are located, next the cached texmf.cnf files
---
--- return {
--- TEXMFBOGUS = 'effe checken of dit werkt',
--- }
-
-function resolvers.resetconfig()
- identify_own()
- instance.configuration, instance.setup, instance.order, instance.loaderror = { }, { }, { }, false
-end
-
-function resolvers.loadnewconfig()
- local luafiles = instance.luafiles
- for i=1,#luafiles do
- local cnf = luafiles[i]
- local pathname = file.dirname(cnf)
- local filename = file.join(pathname,resolvers.luaname)
- local blob = loadfile(filename)
- if blob then
- local data = blob()
- if data then
- if trace_locating then
- logs.report("fileio","loading configuration file '%s'",filename)
- end
- if true then
- -- flatten to variable.progname
- local t = { }
- for k, v in next, data do -- v = progname
- if type(v) == "string" then
- t[k] = v
- else
- for kk, vv in next, v do -- vv = variable
- if type(vv) == "string" then
- t[vv.."."..v] = kk
- end
- end
- end
- end
- instance['setup'][pathname] = t
- else
- instance['setup'][pathname] = data
- end
- else
- if trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance['setup'][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance.order[#instance.order+1] = instance.setup[pathname]
- if instance.loaderror then break end
- end
-end
-
-function resolvers.loadoldconfig()
- if not instance.renewcache then
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- local cnf = cnffiles[i]
- local dname = file.dirname(cnf)
- resolvers.load_data(dname,'configuration')
- instance.order[#instance.order+1] = instance.configuration[dname]
- if instance.loaderror then break end
- end
- end
- resolvers.joinconfig()
+ return caches.contentstate()
end
function resolvers.expand_variables()
local expansions, environment, variables = { }, instance.environment, instance.variables
- local env = resolvers.env
+ local getenv = resolvers.getenv
instance.expansions = expansions
- if instance.engine ~= "" then environment['engine'] = instance.engine end
- if instance.progname ~= "" then environment['progname'] = instance.progname end
+ local engine, progname = instance.engine, instance.progname
+ if type(engine) ~= "string" then instance.engine, engine = "", "" end
+ if type(progname) ~= "string" then instance.progname, progname = "", "" end
+ if engine ~= "" then environment['engine'] = engine end
+ if progname ~= "" then environment['progname'] = progname end
for k,v in next, environment do
local a, b = match(k,"^(%a+)%_(.*)%s*$")
if a and b then
@@ -1233,7 +536,7 @@ function resolvers.expand_variables()
expansions[k] = v
end
end
- for k,v in next, environment do -- move environment to expansions
+ for k,v in next, environment do -- move environment to expansions (variables are already in there)
if not expansions[k] then expansions[k] = v end
end
for k,v in next, variables do -- move variables to expansions
@@ -1242,7 +545,7 @@ function resolvers.expand_variables()
local busy = false
local function resolve(a)
busy = true
- return expansions[a] or env(a)
+ return expansions[a] or getenv(a)
end
while true do
busy = false
@@ -1250,6 +553,8 @@ function resolvers.expand_variables()
local s, n = gsub(v,"%$([%a%d%_%-]+)",resolve)
local s, m = gsub(s,"%$%{([%a%d%_%-]+)%}",resolve)
if n > 0 or m > 0 then
+ s = gsub(s,";+",";")
+ s = gsub(s,";[!{}/\\]+;",";")
expansions[k]= s
end
end
@@ -1286,63 +591,59 @@ function resolvers.unexpanded_path(str)
return file.join_path(resolvers.unexpanded_path_list(str))
end
-do -- no longer needed
+local done = { }
- local done = { }
-
- function resolvers.reset_extra_path()
- local ep = instance.extra_paths
- if not ep then
- ep, done = { }, { }
- instance.extra_paths = ep
- elseif #ep > 0 then
- instance.lists, done = { }, { }
- end
+function resolvers.reset_extra_path()
+ local ep = instance.extra_paths
+ if not ep then
+ ep, done = { }, { }
+ instance.extra_paths = ep
+ elseif #ep > 0 then
+ instance.lists, done = { }, { }
end
+end
- function resolvers.register_extra_path(paths,subpaths)
- local ep = instance.extra_paths or { }
- local n = #ep
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = p .. "/" .. s
- if not done[ps] then
- ep[#ep+1] = resolvers.clean_path(ps)
- done[ps] = true
- end
- end
- end
- else
- for p in gmatch(paths,"[^,]+") do
- if not done[p] then
- ep[#ep+1] = resolvers.clean_path(p)
- done[p] = true
- end
- end
- end
- elseif subpaths and subpaths ~= "" then
- for i=1,n do
+function resolvers.register_extra_path(paths,subpaths)
+ local ep = instance.extra_paths or { }
+ local n = #ep
+ if paths and paths ~= "" then
+ if subpaths and subpaths ~= "" then
+ for p in gmatch(paths,"[^,]+") do
-- we gmatch each step again, not that fast, but used seldom
for s in gmatch(subpaths,"[^,]+") do
- local ps = ep[i] .. "/" .. s
+ local ps = p .. "/" .. s
if not done[ps] then
ep[#ep+1] = resolvers.clean_path(ps)
done[ps] = true
end
end
end
+ else
+ for p in gmatch(paths,"[^,]+") do
+ if not done[p] then
+ ep[#ep+1] = resolvers.clean_path(p)
+ done[p] = true
+ end
+ end
end
- if #ep > 0 then
- instance.extra_paths = ep -- register paths
- end
- if #ep > n then
- instance.lists = { } -- erase the cache
+ elseif subpaths and subpaths ~= "" then
+ for i=1,n do
+ -- we gmatch each step again, not that fast, but used seldom
+ for s in gmatch(subpaths,"[^,]+") do
+ local ps = ep[i] .. "/" .. s
+ if not done[ps] then
+ ep[#ep+1] = resolvers.clean_path(ps)
+ done[ps] = true
+ end
+ end
end
end
-
+ if #ep > 0 then
+ instance.extra_paths = ep -- register paths
+ end
+ if #ep > n then
+ instance.lists = { } -- erase the cache
+ end
end
local function made_list(instance,list)
@@ -1387,7 +688,7 @@ function resolvers.clean_path_list(str)
local t = resolvers.expanded_path_list(str)
if t then
for i=1,#t do
- t[i] = file.collapse_path(resolvers.clean_path(t[i]))
+ t[i] = collapse_path(resolvers.clean_path(t[i]))
end
end
return t
@@ -1427,33 +728,6 @@ function resolvers.expand_path_from_var(str)
return file.join_path(resolvers.expanded_path_list_from_var(str))
end
-function resolvers.format_of_var(str)
- return formats[str] or formats[alternatives[str]] or ''
-end
-function resolvers.format_of_suffix(str)
- return suffixmap[file.extname(str)] or 'tex'
-end
-
-function resolvers.variable_of_format(str)
- return formats[str] or formats[alternatives[str]] or ''
-end
-
-function resolvers.var_of_format_or_suffix(str)
- local v = formats[str]
- if v then
- return v
- end
- v = formats[alternatives[str]]
- if v then
- return v
- end
- v = suffixmap[file.extname(str)]
- if v then
- return formats[isf]
- end
- return ''
-end
-
function resolvers.expand_braces(str) -- output variable and brace expansion of STRING
local ori = resolvers.variable(str)
local pth = expanded_path_from_list(resolvers.split_path(ori))
@@ -1466,9 +740,9 @@ function resolvers.isreadable.file(name)
local readable = lfs.isfile(name) -- brrr
if trace_detail then
if readable then
- logs.report("fileio","file '%s' is readable",name)
+ report_resolvers("file '%s' is readable",name)
else
- logs.report("fileio","file '%s' is not readable", name)
+ report_resolvers("file '%s' is not readable", name)
end
end
return readable
@@ -1484,10 +758,10 @@ local function collect_files(names)
for k=1,#names do
local fname = names[k]
if trace_detail then
- logs.report("fileio","checking name '%s'",fname)
+ report_resolvers("checking name '%s'",fname)
end
- local bname = file.basename(fname)
- local dname = file.dirname(fname)
+ local bname = filebasename(fname)
+ local dname = filedirname(fname)
if dname == "" or find(dname,"^%.") then
dname = false
else
@@ -1500,7 +774,7 @@ local function collect_files(names)
local files = blobpath and instance.files[blobpath]
if files then
if trace_detail then
- logs.report("fileio","deep checking '%s' (%s)",blobpath,bname)
+ report_resolvers("deep checking '%s' (%s)",blobpath,bname)
end
local blobfile = files[bname]
if not blobfile then
@@ -1512,53 +786,38 @@ local function collect_files(names)
end
end
if blobfile then
+ local blobroot = files.__path__ or blobpath
if type(blobfile) == 'string' then
if not dname or find(blobfile,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,blobfile,bname), -- search
- resolvers.concatinators[hash.type](blobpath,blobfile,bname) -- result
- }
+ local kind = hash.type
+ local search = filejoin(blobpath,blobfile,bname)
+ local result = resolvers.concatinators[hash.type](blobroot,blobfile,bname)
+ if trace_detail then
+ report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
+ end
+ filelist[#filelist+1] = { kind, search, result }
end
else
for kk=1,#blobfile do
local vv = blobfile[kk]
if not dname or find(vv,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,vv,bname), -- search
- resolvers.concatinators[hash.type](blobpath,vv,bname) -- result
- }
+ local kind = hash.type
+ local search = filejoin(blobpath,vv,bname)
+ local result = resolvers.concatinators[hash.type](blobroot,vv,bname)
+ if trace_detail then
+ report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
+ end
+ filelist[#filelist+1] = { kind, search, result }
end
end
end
end
elseif trace_locating then
- logs.report("fileio","no match in '%s' (%s)",blobpath,bname)
+ report_resolvers("no match in '%s' (%s)",blobpath,bname)
end
end
end
- if #filelist > 0 then
- return filelist
- else
- return nil
- end
-end
-
-function resolvers.suffix_of_format(str)
- if suffixes[str] then
- return suffixes[str][1]
- else
- return ""
- end
-end
-
-function resolvers.suffixes_of_format(str)
- if suffixes[str] then
- return suffixes[str]
- else
- return {}
- end
+ return #filelist > 0 and filelist or nil
end
function resolvers.register_in_trees(name)
@@ -1578,27 +837,28 @@ local function can_be_dir(name) -- can become local
fakepaths[name] = 2 -- no directory
end
end
- return (fakepaths[name] == 1)
+ return fakepaths[name] == 1
end
local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc)
local result = collected or { }
local stamp = nil
- filename = file.collapse_path(filename)
+ filename = collapse_path(filename)
-- speed up / beware: format problem
if instance.remember then
stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format
if instance.found[stamp] then
if trace_locating then
- logs.report("fileio","remembering file '%s'",filename)
+ report_resolvers("remembering file '%s'",filename)
end
+ resolvers.register_in_trees(filename) -- for tracing used files
return instance.found[stamp]
end
end
if not dangerous[instance.format or "?"] then
if resolvers.isreadable.file(filename) then
if trace_detail then
- logs.report("fileio","file '%s' found directly",filename)
+ report_resolvers("file '%s' found directly",filename)
end
instance.found[stamp] = { filename }
return { filename }
@@ -1606,36 +866,39 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
end
if find(filename,'%*') then
if trace_locating then
- logs.report("fileio","checking wildcard '%s'", filename)
+ report_resolvers("checking wildcard '%s'", filename)
end
result = resolvers.find_wildcard_files(filename)
elseif file.is_qualified_path(filename) then
if resolvers.isreadable.file(filename) then
if trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
+ report_resolvers("qualified name '%s'", filename)
end
result = { filename }
else
- local forcedname, ok, suffix = "", false, file.extname(filename)
+ local forcedname, ok, suffix = "", false, fileextname(filename)
if suffix == "" then -- why
if instance.format == "" then
forcedname = filename .. ".tex"
if resolvers.isreadable.file(forcedname) then
if trace_locating then
- logs.report("fileio","no suffix, forcing standard filetype 'tex'")
+ report_resolvers("no suffix, forcing standard filetype 'tex'")
end
result, ok = { forcedname }, true
end
else
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
- forcedname = filename .. "." .. s
- if resolvers.isreadable.file(forcedname) then
- if trace_locating then
- logs.report("fileio","no suffix, forcing format filetype '%s'", s)
+ local format_suffixes = suffixes[instance.format]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if resolvers.isreadable.file(forcedname) then
+ if trace_locating then
+ report_resolvers("no suffix, forcing format filetype '%s'", s)
+ end
+ result, ok = { forcedname }, true
+ break
end
- result, ok = { forcedname }, true
- break
end
end
end
@@ -1643,7 +906,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if not ok and suffix ~= "" then
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
- local basename = file.basename(filename)
+ local basename = filebasename(filename)
local pattern = gsub(filename .. "$","([%.%-])","%%%1")
local savedformat = instance.format
local format = savedformat or ""
@@ -1684,12 +947,16 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
-- end
end
if not ok and trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
+ report_resolvers("qualified name '%s'", filename)
end
end
else
-- search spec
- local filetype, extra, done, wantedfiles, ext = '', nil, false, { }, file.extname(filename)
+ local filetype, extra, done, wantedfiles, ext = '', nil, false, { }, fileextname(filename)
+ -- tricky as filename can be bla.1.2.3
+--~ if not suffixmap[ext] then --- probably needs to be done elsewhere too
+--~ wantedfiles[#wantedfiles+1] = filename
+--~ end
if ext == "" then
if not instance.force_suffixes then
wantedfiles[#wantedfiles+1] = filename
@@ -1698,29 +965,31 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
wantedfiles[#wantedfiles+1] = filename
end
if instance.format == "" then
- if ext == "" then
+ if ext == "" or not suffixmap[ext] then
local forcedname = filename .. '.tex'
wantedfiles[#wantedfiles+1] = forcedname
filetype = resolvers.format_of_suffix(forcedname)
if trace_locating then
- logs.report("fileio","forcing filetype '%s'",filetype)
+ report_resolvers("forcing filetype '%s'",filetype)
end
else
filetype = resolvers.format_of_suffix(filename)
if trace_locating then
- logs.report("fileio","using suffix based filetype '%s'",filetype)
+ report_resolvers("using suffix based filetype '%s'",filetype)
end
end
else
- if ext == "" then
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. s
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[instance.format]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
end
filetype = instance.format
if trace_locating then
- logs.report("fileio","using given filetype '%s'",filetype)
+ report_resolvers("using given filetype '%s'",filetype)
end
end
local typespec = resolvers.variable_of_format(filetype)
@@ -1728,13 +997,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if not pathlist or #pathlist == 0 then
-- no pathlist, access check only / todo == wildcard
if trace_detail then
- logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ report_resolvers("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
end
for k=1,#wantedfiles do
local fname = wantedfiles[k]
if fname and resolvers.isreadable.file(fname) then
filename, done = fname, true
- result[#result+1] = file.join('.',fname)
+ result[#result+1] = filejoin('.',fname)
break
end
end
@@ -1752,11 +1021,11 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
local dirlist = { }
if filelist then
for i=1,#filelist do
- dirlist[i] = file.dirname(filelist[i][2]) .. "/"
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
end
if trace_detail then
- logs.report("fileio","checking filename '%s'",filename)
+ report_resolvers("checking filename '%s'",filename)
end
-- a bit messy ... esp the doscan setting here
local doscan
@@ -1779,7 +1048,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
expression = "^" .. expression .. "$"
if trace_detail then
- logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname)
+ report_resolvers("using pattern '%s' for path '%s'",expression,pathname)
end
for k=1,#filelist do
local fl = filelist[k]
@@ -1788,20 +1057,19 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if find(d,expression) then
--- todo, test for readable
result[#result+1] = fl[3]
- resolvers.register_in_trees(f) -- for tracing used files
done = true
if instance.allresults then
if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d)
+ report_resolvers("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
else
if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d)
+ report_resolvers("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
end
break
end
elseif trace_detail then
- logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d)
+ report_resolvers("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
end
@@ -1814,10 +1082,10 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if can_be_dir(ppname) then
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local fname = file.join(ppname,w)
+ local fname = filejoin(ppname,w)
if resolvers.isreadable.file(fname) then
if trace_detail then
- logs.report("fileio","found '%s' by scanning",fname)
+ report_resolvers("found '%s' by scanning",fname)
end
result[#result+1] = fname
done = true
@@ -1831,14 +1099,16 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
end
end
if not done and doscan then
- -- todo: slow path scanning
+ -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
end
if done and not instance.allresults then break end
end
end
end
for k=1,#result do
- result[k] = file.collapse_path(result[k])
+ local rk = collapse_path(result[k])
+ result[k] = rk
+ resolvers.register_in_trees(rk) -- for tracing used files
end
if instance.remember then
instance.found[stamp] = result
@@ -1848,7 +1118,7 @@ end
if not resolvers.concatinators then resolvers.concatinators = { } end
-resolvers.concatinators.tex = file.join
+resolvers.concatinators.tex = filejoin
resolvers.concatinators.file = resolvers.concatinators.tex
function resolvers.find_files(filename,filetype,mustexist)
@@ -1875,8 +1145,14 @@ function resolvers.find_file(filename,filetype,mustexist)
return (resolvers.find_files(filename,filetype,mustexist)[1] or "")
end
+function resolvers.find_path(filename,filetype)
+ local path = resolvers.find_files(filename,filetype)[1] or ""
+ -- todo return current path
+ return file.dirname(path)
+end
+
function resolvers.find_given_files(filename)
- local bname, result = file.basename(filename), { }
+ local bname, result = filebasename(filename), { }
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
@@ -1933,9 +1209,9 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
return done
end
-function resolvers.find_wildcard_files(filename) -- todo: remap:
+function resolvers.find_wildcard_files(filename) -- todo: remap: and lpeg
local result = { }
- local bname, dname = file.basename(filename), file.dirname(filename)
+ local bname, dname = filebasename(filename), filedirname(filename)
local path = gsub(dname,"^*/","")
path = gsub(path,"*",".*")
path = gsub(path,"-","%%-")
@@ -1988,24 +1264,24 @@ end
function resolvers.load(option)
statistics.starttiming(instance)
- resolvers.resetconfig()
- resolvers.identify_cnf()
- resolvers.load_lua() -- will become the new method
- resolvers.expand_variables()
- resolvers.load_cnf() -- will be skipped when we have a lua file
+ identify_configuration_files()
+ load_configuration_files()
+ collapse_configuration_data()
resolvers.expand_variables()
if option ~= "nofiles" then
- resolvers.load_hash()
+ load_databases()
resolvers.automount()
end
statistics.stoptiming(instance)
+ local files = instance.files
+ return files and next(files) and true
end
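
Note that resolvers.load now also reports success: it returns true only when the file databases ended up non-empty, so a caller can bail out early. A minimal usage sketch, assuming a script context; the message is illustrative:

    if not resolvers.load() then
        logs.simple("no file databases loaded, check the TEXMF configuration") -- illustrative message
        os.exit()
    end
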
function resolvers.for_files(command, files, filetype, mustexist)
if files and #files > 0 then
local function report(str)
if trace_locating then
- logs.report("fileio",str) -- has already verbose
+ report_resolvers(str) -- has already verbose
else
print(str)
end
@@ -2053,51 +1329,6 @@ function resolvers.register_file(files, name, path)
end
end
-function resolvers.splitmethod(filename)
- if not filename then
- return { } -- safeguard
- elseif type(filename) == "table" then
- return filename -- already split
- elseif not find(filename,"://") then
- return { scheme="file", path = filename, original=filename } -- quick hack
- else
- return url.hashed(filename)
- end
-end
-
-function table.sequenced(t,sep) -- temp here
- local s = { }
- for k, v in next, t do -- indexed?
- s[#s+1] = k .. "=" .. tostring(v)
- end
- return concat(s, sep or " | ")
-end
-
-function resolvers.methodhandler(what, filename, filetype) -- ...
- filename = file.collapse_path(filename)
- local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
- local scheme = specification.scheme
- if resolvers[what][scheme] then
- if trace_locating then
- logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
- end
- return resolvers[what][scheme](filename,filetype) -- todo: specification
- else
- return resolvers[what].tex(filename,filetype) -- todo: specification
- end
-end
-
-function resolvers.clean_path(str)
- if str then
- str = gsub(str,"\\","/")
- str = gsub(str,"^!+","")
- str = gsub(str,"^~",resolvers.homedir)
- return str
- else
- return nil
- end
-end
-
function resolvers.do_with_path(name,func)
local pathlist = resolvers.expanded_path_list(name)
for i=1,#pathlist do
@@ -2109,45 +1340,13 @@ function resolvers.do_with_var(name,func)
func(expanded_var(name))
end
-function resolvers.with_files(pattern,handle)
- local hashes = instance.hashes
- for i=1,#hashes do
- local hash = hashes[i]
- local blobpath = hash.tag
- local blobtype = hash.type
- if blobpath then
- local files = instance.files[blobpath]
- if files then
- for k,v in next, files do
- if find(k,"^remap:") then
- k = files[k]
- v = files[k] -- chained
- end
- if find(k,pattern) then
- if type(v) == "string" then
- handle(blobtype,blobpath,v,k)
- else
- for _,vv in next, v do -- indexed
- handle(blobtype,blobpath,vv,k)
- end
- end
- end
- end
- end
- end
- end
-end
-
function resolvers.locate_format(name)
- local barename, fmtname = gsub(name,"%.%a+$",""), ""
- if resolvers.usecache then
- local path = file.join(caches.setpath("formats")) -- maybe platform
- fmtname = file.join(path,barename..".fmt") or ""
- end
+ local barename = gsub(name,"%.%a+$","")
+ local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
if fmtname == "" then
fmtname = resolvers.find_files(barename..".fmt")[1] or ""
+ fmtname = resolvers.clean_path(fmtname)
end
- fmtname = resolvers.clean_path(fmtname)
if fmtname ~= "" then
local barename = file.removesuffix(fmtname)
local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
@@ -2172,10 +1371,46 @@ function resolvers.boolean_variable(str,default)
end
end
-texconfig.kpse_init = false
-
-kpse = { original = kpse } setmetatable(kpse, { __index = function(k,v) return resolvers[v] end } )
-
--- for a while
-
-input = resolvers
+function resolvers.with_files(pattern,handle,before,after) -- can be a nice iterator instead
+ local instance = resolvers.instance
+ local hashes = instance.hashes
+ for i=1,#hashes do
+ local hash = hashes[i]
+ local blobtype = hash.type
+ local blobpath = hash.tag
+ if blobpath then
+ if before then
+ before(blobtype,blobpath,pattern)
+ end
+ local files = instance.files[blobpath]
+ local total, checked, done = 0, 0, 0
+ if files then
+ for k,v in next, files do
+ total = total + 1
+ if find(k,"^remap:") then
+ k = files[k]
+ v = k -- files[k] -- chained
+ end
+ if find(k,pattern) then
+ if type(v) == "string" then
+ checked = checked + 1
+ if handle(blobtype,blobpath,v,k) then
+ done = done + 1
+ end
+ else
+ checked = checked + #v
+ for i=1,#v do
+ if handle(blobtype,blobpath,v[i],k) then
+ done = done + 1
+ end
+ end
+ end
+ end
+ end
+ end
+ if after then
+ after(blobtype,blobpath,pattern,total,checked,done)
+ end
+ end
+ end
+end
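
The reworked resolvers.with_files gains optional before/after callbacks and per-tree counters (total entries, checked matches, handled hits). A minimal sketch of a caller using the hooks, assuming the databases are already loaded; the pattern and the print-based handler are illustrative only:

    local function before(blobtype, blobpath, pattern)
        print(string.format("scanning %s tree '%s' for '%s'", blobtype, blobpath, pattern))
    end
    local function handle(blobtype, blobpath, name, base)
        print(string.format("  %s -> %s", base, name))
        return true -- a truthy result is counted in 'done'
    end
    local function after(blobtype, blobpath, pattern, total, checked, done)
        print(string.format("  %s entries, %s checked, %s handled", total, checked, done))
    end
    resolvers.with_files("%.mkiv$", handle, before, after)
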
diff --git a/tex/context/base/data-sch.lua b/tex/context/base/data-sch.lua
index e68b6cd01..28f18389e 100644
--- a/tex/context/base/data-sch.lua
+++ b/tex/context/base/data-sch.lua
@@ -8,22 +8,20 @@ if not modules then modules = { } end modules ['data-sch'] = {
local http = require("socket.http")
local ltn12 = require("ltn12")
-
local gsub, concat, format = string.gsub, table.concat, string.format
+local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end)
+local report_schemes = logs.new("schemes")
+
schemes = schemes or { }
-schemes.cached = { }
-schemes.cachepath = caches.definepath("schemes")
schemes.threshold = 24 * 60 * 60
directives.register("schemes.threshold", function(v) schemes.threshold = tonumber(v) or schemes.threshold end)
-local cached, loaded, reused = schemes.cached, { }, { }
-
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
+local cached, loaded, reused = { }, { }, { }
function schemes.curl(name,cachename)
local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name -- no protocol .. "://"
@@ -31,21 +29,21 @@ function schemes.curl(name,cachename)
end
function schemes.fetch(protocol,name,handler)
- local cachename = schemes.cachepath() .. "/" .. gsub(name,"[^%a%d%.]+","-")
- cachename = gsub(cachename,"[\\]", "/") -- cleanup
+ local cleanname = gsub(name,"[^%a%d%.]+","-")
+ local cachename = caches.setfirstwritablefile(cleanname,"schemes")
if not cached[name] then
statistics.starttiming(schemes)
if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > schemes.threshold) then
cached[name] = cachename
if handler then
if trace_schemes then
- logs.report("schemes","fetching '%s', protocol '%s', method 'built-in'",name,protocol)
+ report_schemes("fetching '%s', protocol '%s', method 'built-in'",name,protocol)
end
io.flush()
handler(protocol,name,cachename)
else
if trace_schemes then
- logs.report("schemes","fetching '%s', protocol '%s', method 'curl'",name,protocol)
+ report_schemes("fetching '%s', protocol '%s', method 'curl'",name,protocol)
end
io.flush()
schemes.curl(name,cachename)
@@ -54,19 +52,19 @@ function schemes.fetch(protocol,name,handler)
if io.exists(cachename) then
cached[name] = cachename
if trace_schemes then
- logs.report("schemes","using cached '%s', protocol '%s', cachename '%s'",name,protocol,cachename)
+ report_schemes("using cached '%s', protocol '%s', cachename '%s'",name,protocol,cachename)
end
else
cached[name] = ""
if trace_schemes then
- logs.report("schemes","using missing '%s', protocol '%s'",name,protocol)
+ report_schemes("using missing '%s', protocol '%s'",name,protocol)
end
end
loaded[protocol] = loaded[protocol] + 1
statistics.stoptiming(schemes)
else
if trace_schemes then
- logs.report("schemes","reusing '%s', protocol '%s'",name,protocol)
+ report_schemes("reusing '%s', protocol '%s'",name,protocol)
end
reused[protocol] = reused[protocol] + 1
end
@@ -75,7 +73,7 @@ end
function finders.schemes(protocol,filename,handler)
local foundname = schemes.fetch(protocol,filename,handler)
- return finders.generic(protocol,foundname,filetype)
+ return finders.generic(protocol,foundname)
end
function openers.schemes(protocol,filename)
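
The rewritten schemes.fetch no longer goes through schemes.cachepath; it flattens the url to a safe file name and asks the caches layer for the writable 'schemes' directory. A minimal sketch of that naming step, with a hypothetical url:

    local name      = "http://www.pragma-ade.com/general/manuals/mreadme.pdf" -- hypothetical url
    local cleanname = string.gsub(name, "[^%a%d%.]+", "-")
    -- cleanname is now "http-www.pragma-ade.com-general-manuals-mreadme.pdf"
    local cachename = caches.setfirstwritablefile(cleanname, "schemes")
    -- an existing cached copy is refetched only when older than schemes.threshold (24 * 60 * 60 seconds)
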
diff --git a/tex/context/base/data-tex.lua b/tex/context/base/data-tex.lua
index c9fa3625a..727964d1f 100644
--- a/tex/context/base/data-tex.lua
+++ b/tex/context/base/data-tex.lua
@@ -13,8 +13,7 @@ local unpack = unpack or table.unpack
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local texiowrite_nl = (texio and texio.write_nl) or print
-local texiowrite = (texio and texio.write) or print
+local report_resolvers = logs.new("resolvers")
local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
@@ -22,12 +21,12 @@ function finders.generic(tag,filename,filetype)
local foundname = resolvers.find_file(filename,filetype)
if foundname and foundname ~= "" then
if trace_locating then
- logs.report("fileio","%s finder: file '%s' found",tag,filename)
+ report_resolvers("%s finder: file '%s' found",tag,filename)
end
return foundname
else
if trace_locating then
- logs.report("fileio","%s finder: unknown file '%s'",tag,filename)
+ report_resolvers("%s finder: unknown file '%s'",tag,filename)
end
return unpack(finders.notfound)
end
@@ -49,7 +48,7 @@ function openers.text_opener(filename,file_handle,tag)
local t = { }
if u > 0 then
if trace_locating then
- logs.report("fileio","%s opener, file '%s' opened using method '%s'",tag,filename,unicode.utfname[u])
+ report_resolvers("%s opener, file '%s' opened using method '%s'",tag,filename,unicode.utfname[u])
end
local l
if u > 2 then
@@ -66,7 +65,7 @@ function openers.text_opener(filename,file_handle,tag)
noflines = #l,
close = function()
if trace_locating then
- logs.report("fileio","%s closer, file '%s' closed",tag,filename)
+ report_resolvers("%s closer, file '%s' closed",tag,filename)
end
logs.show_close(filename)
t = nil
@@ -101,7 +100,7 @@ function openers.text_opener(filename,file_handle,tag)
}
else
if trace_locating then
- logs.report("fileio","%s opener, file '%s' opened",tag,filename)
+ report_resolvers("%s opener, file '%s' opened",tag,filename)
end
-- todo: file;name -> freeze / scan the first line -> freeze
--~ local data = lpegmatch(getlines,file_handle:read("*a"))
@@ -131,7 +130,7 @@ function openers.text_opener(filename,file_handle,tag)
end,
close = function()
if trace_locating then
- logs.report("fileio","%s closer, file '%s' closed",tag,filename)
+ report_resolvers("%s closer, file '%s' closed",tag,filename)
end
logs.show_close(filename)
file_handle:close()
@@ -156,13 +155,13 @@ function openers.generic(tag,filename)
if f then
logs.show_open(filename) -- todo
if trace_locating then
- logs.report("fileio","%s opener, file '%s' opened",tag,filename)
+ report_resolvers("%s opener, file '%s' opened",tag,filename)
end
return openers.text_opener(filename,f,tag)
end
end
if trace_locating then
- logs.report("fileio","%s opener, file '%s' not found",tag,filename)
+ report_resolvers("%s opener, file '%s' not found",tag,filename)
end
return unpack(openers.notfound)
end
@@ -173,7 +172,7 @@ function loaders.generic(tag,filename)
if f then
logs.show_load(filename)
if trace_locating then
- logs.report("fileio","%s loader, file '%s' loaded",tag,filename)
+ report_resolvers("%s loader, file '%s' loaded",tag,filename)
end
local s = f:read("*a")
if garbagecollector and garbagecollector.check then garbagecollector.check(#s) end
@@ -184,7 +183,7 @@ function loaders.generic(tag,filename)
end
end
if trace_locating then
- logs.report("fileio","%s loader, file '%s' not found",tag,filename)
+ report_resolvers("%s loader, file '%s' not found",tag,filename)
end
return unpack(loaders.notfound)
end
diff --git a/tex/context/base/data-tmf.lua b/tex/context/base/data-tmf.lua
index 7421eacfc..c26f8e6e6 100644
--- a/tex/context/base/data-tmf.lua
+++ b/tex/context/base/data-tmf.lua
@@ -6,70 +6,52 @@ if not modules then modules = { } end modules ['data-tmf'] = {
license = "see context related readme files"
}
-local find, gsub, match = string.find, string.gsub, string.match
-local getenv, setenv = os.getenv, os.setenv
+-- operator aliases from the old tmf format: "=" / "<<" set a value, "?" / "??" set it when unset,
+-- "<" / "+=" append to an existing value, ">" / "=+" prepend to an existing value
--- loads *.tmf files in minimal tree roots (to be optimized and documented)
+function resolvers.load_tree(tree)
+ if type(tree) == "string" and tree ~= "" then
-function resolvers.check_environment(tree)
- logs.simpleline()
- setenv('TMP', getenv('TMP') or getenv('TEMP') or getenv('TMPDIR') or getenv('HOME'))
- setenv('TEXOS', getenv('TEXOS') or ("texmf-" .. os.platform))
- setenv('TEXPATH', gsub(tree or "tex","\/+$",''))
- setenv('TEXMFOS', getenv('TEXPATH') .. "/" .. getenv('TEXOS'))
- logs.simpleline()
- logs.simple("preset : TEXPATH => %s", getenv('TEXPATH'))
- logs.simple("preset : TEXOS => %s", getenv('TEXOS'))
- logs.simple("preset : TEXMFOS => %s", getenv('TEXMFOS'))
- logs.simple("preset : TMP => %s", getenv('TMP'))
- logs.simple('')
-end
+ local getenv, setenv = resolvers.getenv, resolvers.setenv
-function resolvers.load_environment(name) -- todo: key=value as well as lua
- local f = io.open(name)
- if f then
- for line in f:lines() do
- if find(line,"^[%%%#]") then
- -- skip comment
- else
- local key, how, value = match(line,"^(.-)%s*([<=>%?]+)%s*(.*)%s*$")
- if how then
- value = gsub(value,"%%(.-)%%", function(v) return getenv(v) or "" end)
- if how == "=" or how == "<<" then
- setenv(key,value)
- elseif how == "?" or how == "??" then
- setenv(key,getenv(key) or value)
- elseif how == "<" or how == "+=" then
- if getenv(key) then
- setenv(key,getenv(key) .. io.fileseparator .. value)
- else
- setenv(key,value)
- end
- elseif how == ">" or how == "=+" then
- if getenv(key) then
- setenv(key,value .. io.pathseparator .. getenv(key))
- else
- setenv(key,value)
- end
- end
- end
- end
- end
- f:close()
- end
-end
+ -- later might listen to the raw osenv var as well
+ local texos = "texmf-" .. os.platform
-function resolvers.load_tree(tree)
- if tree and tree ~= "" then
- local setuptex = 'setuptex.tmf'
- if lfs.attributes(tree, "mode") == "directory" then -- check if not nil
- setuptex = tree .. "/" .. setuptex
- else
- setuptex = tree
+ local oldroot = environment.texroot
+ local newroot = file.collapse_path(tree)
+
+ local newtree = file.join(newroot,texos)
+ local newpath = file.join(newtree,"bin")
+
+ if not lfs.isdir(newtree) then
+ logs.simple("no '%s' under tree %s",texos,tree)
+ os.exit()
end
- if io.exists(setuptex) then
- resolvers.check_environment(tree)
- resolvers.load_environment(setuptex)
+ if not lfs.isdir(newpath) then
+ logs.simple("no '%s/bin' under tree %s",texos,tree)
+ os.exit()
end
+
+ local texmfos = newtree
+
+ environment.texroot = newroot
+ environment.texos = texos
+ environment.texmfos = texmfos
+
+ setenv('SELFAUTOPARENT', newroot)
+ setenv('SELFAUTODIR', newtree)
+ setenv('SELFAUTOLOC', newpath)
+ setenv('TEXROOT', newroot)
+ setenv('TEXOS', texos)
+ setenv('TEXMFOS', texmfos)
+ setenv('TEXMFCNF', resolvers.luacnfspec)
+ setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+
+ logs.simple("changing from root '%s' to '%s'",oldroot,newroot)
+ logs.simple("prepending '%s' to binary path",newpath)
+ logs.simple()
end
end
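
The new resolvers.load_tree replaces the old setuptex.tmf parsing: instead of reading key=value lines it derives everything from the tree root itself and adjusts the environment. A minimal sketch of the effect, with a hypothetical tree path:

    resolvers.load_tree("/opt/context/minimals/current") -- hypothetical root
    -- afterwards, among other things:
    --   TEXROOT = /opt/context/minimals/current
    --   TEXOS   = texmf-<platform>          (from os.platform)
    --   TEXMFOS = <TEXROOT>/<TEXOS>
    --   PATH    = <TEXMFOS>/bin plus the previous PATH
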
diff --git a/tex/context/base/data-tmp.lua b/tex/context/base/data-tmp.lua
index 25f5b975c..664e9ccff 100644
--- a/tex/context/base/data-tmp.lua
+++ b/tex/context/base/data-tmp.lua
@@ -1,5 +1,5 @@
if not modules then modules = { } end modules ['data-tmp'] = {
- version = 1.001,
+ version = 1.100,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
@@ -22,63 +22,141 @@ being written at the same time is small. We also need to extend
luatools with a recache feature.</p>
--ldx]]--
-local format, lower, gsub = string.format, string.lower, string.gsub
+local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local mkdirs, isdir = dir.mkdirs, lfs.isdir
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
+
+local report_cache = logs.new("cache")
+
+local report_resolvers = logs.new("resolvers")
caches = caches or { }
-caches.path = caches.path or nil
-caches.base = caches.base or "luatex-cache"
-caches.more = caches.more or "context"
-caches.direct = false -- true is faster but may need huge amounts of memory
-caches.tree = false
-caches.paths = caches.paths or nil
-caches.force = false
-caches.defaults = { "TEXMFCACHE", "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
-
-function caches.temp()
- local cachepath = nil
- local function check(list,isenv)
- if not cachepath then
- for k=1,#list do
- local v = list[k]
- cachepath = (isenv and (os.env[v] or "")) or v or ""
- if cachepath == "" then
- -- next
- else
- cachepath = resolvers.clean_path(cachepath)
- if lfs.isdir(cachepath) and file.iswritable(cachepath) then -- lfs.attributes(cachepath,"mode") == "directory"
- break
- elseif caches.force or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
- dir.mkdirs(cachepath)
- if lfs.isdir(cachepath) and file.iswritable(cachepath) then
- break
+caches.base = caches.base or "luatex-cache"
+caches.more = caches.more or "context"
+caches.direct = false -- true is faster but may need huge amounts of memory
+caches.tree = false
+caches.force = true
+caches.ask = false
+caches.defaults = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
+
+local writable, readables, usedreadables = nil, { }, { }
+
+-- we could use a metatable for writable and readable but not yet
+
+local function identify()
+ -- Combining the loops makes it messy. First we check the format cache path
+ -- and when the last component is not present we try to create it.
+ local texmfcaches = resolvers.clean_path_list("TEXMFCACHE")
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath = texmfcaches[k]
+ if cachepath ~= "" then
+ cachepath = resolvers.clean_path(cachepath)
+ cachepath = file.collapse_path(cachepath)
+ local valid = isdir(cachepath)
+ if valid then
+ if file.isreadable(cachepath) then
+ readables[#readables+1] = cachepath
+ if not writable and file.iswritable(cachepath) then
+ writable = cachepath
+ end
+ end
+ elseif not writable and caches.force then
+ local cacheparent = file.dirname(cachepath)
+ if file.iswritable(cacheparent) then
+ if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
+ mkdirs(cachepath)
+ if isdir(cachepath) and file.iswritable(cachepath) then
+ report_cache("created: %s",cachepath)
+ writable = cachepath
+ readables[#readables+1] = cachepath
+ end
end
end
end
- cachepath = nil
end
end
end
- check(resolvers.clean_path_list("TEXMFCACHE") or { })
- check(caches.defaults,true)
- if not cachepath then
- print("\nfatal error: there is no valid (writable) cache path defined\n")
+ -- As a last resort we check some temporary paths but this time we don't
+ -- create them.
+ local texmfcaches = caches.defaults
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath = texmfcaches[k]
+ cachepath = resolvers.getenv(cachepath)
+ if cachepath ~= "" then
+ cachepath = resolvers.clean_path(cachepath)
+ local valid = isdir(cachepath)
+ if valid and file.isreadable(cachepath) then
+ if not writable and file.iswritable(cachepath) then
+ readables[#readables+1] = cachepath
+ writable = cachepath
+ break
+ end
+ end
+ end
+ end
+ end
+ -- Some extra checking. If we have no writable or readable path then we simply
+ -- quit.
+ if not writable then
+ report_cache("fatal error: there is no valid writable cache path defined")
os.exit()
- elseif not lfs.isdir(cachepath) then -- lfs.attributes(cachepath,"mode") ~= "directory"
- print(format("\nfatal error: cache path %s is not a directory\n",cachepath))
+ elseif #readables == 0 then
+ report_cache("fatal error: there is no valid readable cache path defined")
os.exit()
end
- cachepath = file.collapse_path(cachepath)
- function caches.temp()
- return cachepath
+ -- why here
+ writable = dir.expand_name(resolvers.clean_path(writable)) -- just in case
+ -- moved here
+ local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree
+ if tree then
+ caches.tree = tree
+ writable = mkdirs(writable,base,more,tree)
+ for i=1,#readables do
+ readables[i] = file.join(readables[i],base,more,tree)
+ end
+ else
+ writable = mkdirs(writable,base,more)
+ for i=1,#readables do
+ readables[i] = file.join(readables[i],base,more)
+ end
+ end
+ -- end
+ if trace_cache then
+ for i=1,#readables do
+ report_cache("using readable path '%s' (order %s)",readables[i],i)
+ end
+ report_cache("using writable path '%s'",writable)
+ end
+ identify = function()
+ return writable, readables
+ end
+ return writable, readables
+end
+
+function caches.usedpaths()
+ local writable, readables = identify()
+ if #readables > 1 then
+ local result = { }
+ for i=1,#readables do
+ local readable = readables[i]
+ if usedreadables[i] or readable == writable then
+ result[#result+1] = format("readable: '%s' (order %s)",readable,i)
+ end
+ end
+ result[#result+1] = format("writable: '%s'",writable)
+ return result
+ else
+ return writable
end
- return cachepath
end
-function caches.configpath()
- return table.concat(resolvers.instance.cnffiles,";")
+function caches.configfiles()
+ return table.concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -86,7 +164,7 @@ function caches.hashed(tree)
end
function caches.treehash()
- local tree = caches.configpath()
+ local tree = caches.configfiles()
if not tree or tree == "" then
return false
else
@@ -94,35 +172,68 @@ function caches.treehash()
end
end
-function caches.setpath(...)
- if not caches.path then
- if not caches.path then
- caches.path = caches.temp()
- end
- caches.path = resolvers.clean_path(caches.path) -- to be sure
- caches.tree = caches.tree or caches.treehash()
- if caches.tree then
- caches.path = dir.mkdirs(caches.path,caches.base,caches.more,caches.tree)
+local r_cache, w_cache = { }, { } -- normally w is in r but who cares
+
+local function getreadablepaths(...) -- we can optimize this as we have at most 2 tags
+ local tags = { ... }
+ local hash = concat(tags,"/")
+ local done = r_cache[hash]
+ if not done then
+ local writable, readables = identify() -- exit if not found
+ if #tags > 0 then
+ done = { }
+ for i=1,#readables do
+ done[i] = file.join(readables[i],...)
+ end
else
- caches.path = dir.mkdirs(caches.path,caches.base,caches.more)
+ done = readables
end
+ r_cache[hash] = done
end
- if not caches.path then
- caches.path = '.'
+ return done
+end
+
+local function getwritablepath(...)
+ local tags = { ... }
+ local hash = concat(tags,"/")
+ local done = w_cache[hash]
+ if not done then
+ local writable, readables = identify() -- exit if not found
+ if #tags > 0 then
+ done = mkdirs(writable,...)
+ else
+ done = writable
+ end
+ w_cache[hash] = done
end
- caches.path = resolvers.clean_path(caches.path)
- local dirs = { ... }
- if #dirs > 0 then
- local pth = dir.mkdirs(caches.path,...)
- return pth
+ return done
+end
+
+caches.getreadablepaths = getreadablepaths
+caches.getwritablepath = getwritablepath
+
+function caches.getfirstreadablefile(filename,...)
+ local rd = getreadablepaths(...)
+ for i=1,#rd do
+ local path = rd[i]
+ local fullname = file.join(path,filename)
+ if file.isreadable(fullname) then
+ usedreadables[i] = true
+ return fullname, path
+ end
end
- caches.path = dir.expand_name(caches.path)
- return caches.path
+ return caches.setfirstwritablefile(filename,...)
end
-function caches.definepath(category,subcategory)
+function caches.setfirstwritablefile(filename,...)
+ local wr = getwritablepath(...)
+ local fullname = file.join(wr,filename)
+ return fullname, wr
+end
+
+function caches.define(category,subcategory) -- for old times sake
return function()
- return caches.setpath(category,subcategory)
+ return getwritablepath(category,subcategory)
end
end
@@ -130,23 +241,23 @@ function caches.setluanames(path,name)
return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
end
-function caches.loaddata(path,name)
- local tmaname, tmcname = caches.setluanames(path,name)
- local loader = loadfile(tmcname) or loadfile(tmaname)
- if loader then
- loader = loader()
- collectgarbage("step")
- return loader
- else
- return false
+function caches.loaddata(readables,name)
+ if type(readables) == "string" then
+ readables = { readables }
end
+ for i=1,#readables do
+ local path = readables[i]
+ local tmaname, tmcname = caches.setluanames(path,name)
+ local loader = loadfile(tmcname) or loadfile(tmaname)
+ if loader then
+ loader = loader()
+ collectgarbage("step")
+ return loader
+ end
+ end
+ return false
end
---~ function caches.loaddata(path,name)
---~ local tmaname, tmcname = caches.setluanames(path,name)
---~ return dofile(tmcname) or dofile(tmaname)
---~ end
-
function caches.iswritable(filepath,filename)
local tmaname, tmcname = caches.setluanames(filepath,filename)
return file.iswritable(tmaname)
@@ -169,10 +280,79 @@ function caches.savedata(filepath,filename,data,raw)
utils.lua.compile(tmaname, tmcname, cleanup, strip)
end
--- here we use the cache for format loading (texconfig.[formatname|jobname])
+-- moved from data-res:
+
+local content_state = { }
+
+function caches.contentstate()
+ return content_state or { }
+end
+
+function caches.loadcontent(cachename,dataname)
+ local name = caches.hashed(cachename)
+ local full, path = caches.getfirstreadablefile(name ..".lua","trees")
+ local filename = file.join(path,name)
+ local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
+ if blob then
+ local data = blob()
+ if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then
+ content_state[#content_state+1] = data.uuid
+ if trace_locating then
+ report_resolvers("loading '%s' for '%s' from '%s'",dataname,cachename,filename)
+ end
+ return data.content
+ elseif trace_locating then
+ report_resolvers("skipping '%s' for '%s' from '%s'",dataname,cachename,filename)
+ end
+ elseif trace_locating then
+ report_resolvers("skipping '%s' for '%s' from '%s'",dataname,cachename,filename)
+ end
+end
+
+function caches.collapsecontent(content)
+ for k, v in next, content do
+ if type(v) == "table" and #v == 1 then
+ content[k] = v[1]
+ end
+ end
+end
---~ if tex and texconfig and texconfig.formatname and texconfig.formatname == "" then
-if tex and texconfig and (not texconfig.formatname or texconfig.formatname == "") and input and resolvers.instance then
- if not texconfig.luaname then texconfig.luaname = "cont-en.lua" end -- or luc
- texconfig.formatname = caches.setpath("formats") .. "/" .. gsub(texconfig.luaname,"%.lu.$",".fmt")
+function caches.savecontent(cachename,dataname,content)
+ local name = caches.hashed(cachename)
+ local full, path = caches.setfirstwritablefile(name ..".lua","trees")
+ local filename = file.join(path,name) -- is full
+ local luaname, lucname = filename .. ".lua", filename .. ".luc"
+ if trace_locating then
+ report_resolvers("preparing '%s' for '%s'",dataname,cachename)
+ end
+ local data = {
+ type = dataname,
+ root = cachename,
+ version = resolvers.cacheversion,
+ date = os.date("%Y-%m-%d"),
+ time = os.date("%H:%M:%S"),
+ content = content,
+ uuid = os.uuid(),
+ }
+ local ok = io.savedata(luaname,table.serialize(data,true))
+ if ok then
+ if trace_locating then
+ report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
+ end
+ if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip
+ if trace_locating then
+ report_resolvers("'%s' compiled to '%s'",dataname,lucname)
+ end
+ return true
+ else
+ if trace_locating then
+ report_resolvers("compiling failed for '%s', deleting file '%s'",dataname,lucname)
+ end
+ os.remove(lucname)
+ end
+ elseif trace_locating then
+ report_resolvers("unable to save '%s' in '%s' (access error)",dataname,luaname)
+ end
end
+
+
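
The cache layer above now keeps one writable path and a list of readable ones, resolved lazily by identify(). A minimal sketch of the two helpers most callers use; the file name is illustrative:

    -- first readable hit wins; when nothing is found it falls back to the writable tree
    local fullname, path = caches.getfirstreadablefile("cont-en.lua", "formats")
    -- always points into the single writable cache tree (directories are created as needed)
    local target, wpath = caches.setfirstwritablefile("cont-en.lua", "formats")
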
diff --git a/tex/context/base/data-tre.lua b/tex/context/base/data-tre.lua
index d5ca258e4..75cb39c3a 100644
--- a/tex/context/base/data-tre.lua
+++ b/tex/context/base/data-tre.lua
@@ -8,14 +8,14 @@ if not modules then modules = { } end modules ['data-tre'] = {
-- \input tree://oeps1/**/oeps.tex
-local find, gsub = string.find, string.gsub
+local find, gsub, format = string.find, string.gsub, string.format
local unpack = unpack or table.unpack
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
+local report_resolvers = logs.new("resolvers")
-local done, found = { }, { }
+local done, found, notfound = { }, { }, resolvers.finders.notfound
-function finders.tree(specification,filetype)
+function resolvers.finders.tree(specification,filetype)
local fnd = found[specification]
if not fnd then
local spec = resolvers.splitmethod(specification).path or ""
@@ -37,11 +37,45 @@ function finders.tree(specification,filetype)
end
end
end
- fnd = unpack(finders.notfound)
+ fnd = unpack(notfound) -- unpack ? why not just notfound[1]
found[specification] = fnd
end
return fnd
end
-openers.tree = openers.generic
-loaders.tree = loaders.generic
+function resolvers.locators.tree(specification)
+ local spec = resolvers.splitmethod(specification)
+ local path = spec.path
+ if path ~= '' and lfs.isdir(path) then
+ if trace_locating then
+ report_resolvers("tree locator '%s' found (%s)",path,specification)
+ end
+ resolvers.append_hash('tree',specification,path,false) -- don't cache
+ elseif trace_locating then
+ report_resolvers("tree locator '%s' not found",path)
+ end
+end
+
+function resolvers.hashers.tree(tag,name)
+ if trace_locating then
+ report_resolvers("analysing tree '%s' as '%s'",name,tag)
+ end
+ -- todo: maybe share with done above
+ local spec = resolvers.splitmethod(tag)
+ local path = spec.path
+ resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
+end
+
+function resolvers.generators.tree(tag)
+ local spec = resolvers.splitmethod(tag)
+ local path = spec.path
+ resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
+end
+
+function resolvers.concatinators.tree(tag,path,name)
+ return file.join(tag,path,name)
+end
+
+resolvers.isreadable.tree = file.isreadable
+resolvers.openers.tree = resolvers.openers.generic
+resolvers.loaders.tree = resolvers.loaders.generic
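
With the locator, hasher and generator added here, a tree:// specification behaves like a small, uncached texmf tree. A minimal sketch of how such a specification is split and registered, assuming a hypothetical directory:

    local specification = "tree:///opt/projects/mytree" -- hypothetical
    local spec = resolvers.splitmethod(specification)   -- roughly { scheme = "tree", path = "/opt/projects/mytree", ... }
    if spec.path ~= "" and lfs.isdir(spec.path) then
        resolvers.append_hash("tree", specification, spec.path, false) -- false: this hash is not cached
    end
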
diff --git a/tex/context/base/data-use.lua b/tex/context/base/data-use.lua
index 5ecd7805f..1093a4ac7 100644
--- a/tex/context/base/data-use.lua
+++ b/tex/context/base/data-use.lua
@@ -10,41 +10,7 @@ local format, lower, gsub, find = string.format, string.lower, string.gsub, stri
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
--- since we want to use the cache instead of the tree, we will now
--- reimplement the saver.
-
-local save_data = resolvers.save_data
-local load_data = resolvers.load_data
-
-resolvers.cachepath = nil -- public, for tracing
-resolvers.usecache = true -- public, for tracing
-
-function resolvers.save_data(dataname)
- save_data(dataname, function(cachename,dataname)
- resolvers.usecache = not toboolean(resolvers.expansion("CACHEINTDS") or "false",true)
- if resolvers.usecache then
- resolvers.cachepath = resolvers.cachepath or caches.definepath("trees")
- return file.join(resolvers.cachepath(),caches.hashed(cachename))
- else
- return file.join(cachename,dataname)
- end
- end)
-end
-
-function resolvers.load_data(pathname,dataname,filename)
- load_data(pathname,dataname,filename,function(dataname,filename)
- resolvers.usecache = not toboolean(resolvers.expansion("CACHEINTDS") or "false",true)
- if resolvers.usecache then
- resolvers.cachepath = resolvers.cachepath or caches.definepath("trees")
- return file.join(resolvers.cachepath(),caches.hashed(pathname))
- else
- if not filename or (filename == "") then
- filename = dataname
- end
- return file.join(pathname,filename)
- end
- end)
-end
+local report_resolvers = logs.new("resolvers")
-- we will make a better format, maybe something xml or just text or lua
@@ -53,7 +19,7 @@ resolvers.automounted = resolvers.automounted or { }
function resolvers.automount(usecache)
local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT'))
if (not mountpaths or #mountpaths == 0) and usecache then
- mountpaths = { caches.setpath("mount") }
+ mountpaths = caches.getreadablepaths("mount")
end
if mountpaths and #mountpaths > 0 then
statistics.starttiming(resolvers.instance)
@@ -67,7 +33,7 @@ function resolvers.automount(usecache)
-- skip
elseif find(line,"^zip://") then
if trace_locating then
- logs.report("fileio","mounting %s",line)
+ report_resolvers("mounting %s",line)
end
table.insert(resolvers.automounted,line)
resolvers.usezipfile(line)
@@ -83,8 +49,8 @@ end
-- status info
-statistics.register("used config path", function() return caches.configpath() end)
-statistics.register("used cache path", function() return caches.temp() or "?" end)
+statistics.register("used config file", function() return caches.configfiles() end)
+statistics.register("used cache path", function() return caches.usedpaths() end)
-- experiment (code will move)
@@ -112,11 +78,11 @@ function statistics.check_fmt_status(texname)
local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
local luvbanner = luv.enginebanner or "?"
if luvbanner ~= enginebanner then
- return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
+ return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
end
local luvhash = luv.sourcehash or "?"
if luvhash ~= sourcehash then
- return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
+ return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
end
else
return "invalid status file"
diff --git a/tex/context/base/data-zip.lua b/tex/context/base/data-zip.lua
index aa3740a83..a43a19b3f 100644
--- a/tex/context/base/data-zip.lua
+++ b/tex/context/base/data-zip.lua
@@ -11,6 +11,8 @@ local unpack = unpack or table.unpack
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
-- zip:///oeps.zip?name=bla/bla.tex
-- zip:///oeps.zip?tree=tex/texmf-local
-- zip:///texmf.zip?tree=/tex/texmf
@@ -61,16 +63,16 @@ function locators.zip(specification) -- where is this used? startup zips (untest
local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree
if trace_locating then
if zfile then
- logs.report("fileio","zip locator, archive '%s' found",specification.original)
+ report_resolvers("zip locator, archive '%s' found",specification.original)
else
- logs.report("fileio","zip locator, archive '%s' not found",specification.original)
+ report_resolvers("zip locator, archive '%s' not found",specification.original)
end
end
end
function hashers.zip(tag,name)
if trace_locating then
- logs.report("fileio","loading zip file '%s' as '%s'",name,tag)
+ report_resolvers("loading zip file '%s' as '%s'",name,tag)
end
resolvers.usezipfile(format("%s?tree=%s",tag,name))
end
@@ -95,25 +97,25 @@ function finders.zip(specification,filetype)
local zfile = zip.openarchive(specification.path)
if zfile then
if trace_locating then
- logs.report("fileio","zip finder, archive '%s' found",specification.path)
+ report_resolvers("zip finder, archive '%s' found",specification.path)
end
local dfile = zfile:open(q.name)
if dfile then
dfile = zfile:close()
if trace_locating then
- logs.report("fileio","zip finder, file '%s' found",q.name)
+ report_resolvers("zip finder, file '%s' found",q.name)
end
return specification.original
elseif trace_locating then
- logs.report("fileio","zip finder, file '%s' not found",q.name)
+ report_resolvers("zip finder, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio","zip finder, unknown archive '%s'",specification.path)
+ report_resolvers("zip finder, unknown archive '%s'",specification.path)
end
end
end
if trace_locating then
- logs.report("fileio","zip finder, '%s' not found",filename)
+ report_resolvers("zip finder, '%s' not found",filename)
end
return unpack(finders.notfound)
end
@@ -126,25 +128,25 @@ function openers.zip(specification)
local zfile = zip.openarchive(zipspecification.path)
if zfile then
if trace_locating then
- logs.report("fileio","zip opener, archive '%s' opened",zipspecification.path)
+ report_resolvers("zip opener, archive '%s' opened",zipspecification.path)
end
local dfile = zfile:open(q.name)
if dfile then
logs.show_open(specification)
if trace_locating then
- logs.report("fileio","zip opener, file '%s' found",q.name)
+ report_resolvers("zip opener, file '%s' found",q.name)
end
return openers.text_opener(specification,dfile,'zip')
elseif trace_locating then
- logs.report("fileio","zip opener, file '%s' not found",q.name)
+ report_resolvers("zip opener, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio","zip opener, unknown archive '%s'",zipspecification.path)
+ report_resolvers("zip opener, unknown archive '%s'",zipspecification.path)
end
end
end
if trace_locating then
- logs.report("fileio","zip opener, '%s' not found",filename)
+ report_resolvers("zip opener, '%s' not found",filename)
end
return unpack(openers.notfound)
end
@@ -157,27 +159,27 @@ function loaders.zip(specification)
local zfile = zip.openarchive(specification.path)
if zfile then
if trace_locating then
- logs.report("fileio","zip loader, archive '%s' opened",specification.path)
+ report_resolvers("zip loader, archive '%s' opened",specification.path)
end
local dfile = zfile:open(q.name)
if dfile then
logs.show_load(filename)
if trace_locating then
- logs.report("fileio","zip loader, file '%s' loaded",filename)
+ report_resolvers("zip loader, file '%s' loaded",filename)
end
local s = dfile:read("*all")
dfile:close()
return true, s, #s
elseif trace_locating then
- logs.report("fileio","zip loader, file '%s' not found",q.name)
+ report_resolvers("zip loader, file '%s' not found",q.name)
end
elseif trace_locating then
- logs.report("fileio","zip loader, unknown archive '%s'",specification.path)
+ report_resolvers("zip loader, unknown archive '%s'",specification.path)
end
end
end
if trace_locating then
- logs.report("fileio","zip loader, '%s' not found",filename)
+ report_resolvers("zip loader, '%s' not found",filename)
end
return unpack(openers.notfound)
end
@@ -195,7 +197,7 @@ function resolvers.usezipfile(zipname)
if z then
local instance = resolvers.instance
if trace_locating then
- logs.report("fileio","zip registering, registering archive '%s'",zipname)
+ report_resolvers("zip registering, registering archive '%s'",zipname)
end
statistics.starttiming(instance)
resolvers.prepend_hash('zip',zipname,zipfile)
@@ -204,10 +206,10 @@ function resolvers.usezipfile(zipname)
instance.files[zipname] = resolvers.register_zip_file(z,tree or "")
statistics.stoptiming(instance)
elseif trace_locating then
- logs.report("fileio","zip registering, unknown archive '%s'",zipname)
+ report_resolvers("zip registering, unknown archive '%s'",zipname)
end
elseif trace_locating then
- logs.report("fileio","zip registering, '%s' not found",zipname)
+ report_resolvers("zip registering, '%s' not found",zipname)
end
end
@@ -219,7 +221,7 @@ function resolvers.register_zip_file(z,tree)
filter = format("^%s/(.+)/(.-)$",tree)
end
if trace_locating then
- logs.report("fileio","zip registering, using filter '%s'",filter)
+ report_resolvers("zip registering, using filter '%s'",filter)
end
local register, n = resolvers.register_file, 0
for i in z:files() do
@@ -236,6 +238,6 @@ function resolvers.register_zip_file(z,tree)
n = n + 1
end
end
- logs.report("fileio","zip registering, %s files registered",n)
+ report_resolvers("zip registering, %s files registered",n)
return files
end
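
The zip resolver that now reports through report_resolvers is driven by the zip:// url forms listed at the top of data-zip.lua. A minimal sketch of registering an archive so that its files take part in normal lookups; the archive name is hypothetical:

    resolvers.usezipfile("zip:///texmf-local.zip?tree=/tex/texmf-local") -- hypothetical archive
    -- once registered, files inside the archive resolve like any other:
    -- local found = resolvers.find_file("somefile.tex")
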
diff --git a/tex/context/base/font-afm.lua b/tex/context/base/font-afm.lua
index 87dec59c6..072b3e59f 100644
--- a/tex/context/base/font-afm.lua
+++ b/tex/context/base/font-afm.lua
@@ -21,6 +21,8 @@ local trace_features = false trackers.register("afm.features", function(v) trac
local trace_indexing = false trackers.register("afm.indexing", function(v) trace_indexing = v end)
local trace_loading = false trackers.register("afm.loading", function(v) trace_loading = v end)
+local report_afm = logs.new("load afm")
+
local format, match, gmatch, lower, gsub = string.format, string.match, string.gmatch, string.lower, string.gsub
local lpegmatch = lpeg.match
local abs = math.abs
@@ -163,50 +165,41 @@ local function get_variables(data,fontmetrics)
end
end
-local function get_indexes(data,filename)
- local pfbfile = file.replacesuffix(filename,"pfb")
- local pfbname = resolvers.find_file(pfbfile,"pfb") or ""
- if pfbname == "" then
- pfbname = resolvers.find_file(file.basename(pfbfile),"pfb") or ""
- end
- if pfbname ~= "" then
- data.luatex.filename = pfbname
- local pfbblob = fontloader.open(pfbname)
- if pfbblob then
- local characters = data.characters
- local pfbdata = fontloader.to_table(pfbblob)
- --~ print(table.serialize(pfbdata))
- if pfbdata then
- local glyphs = pfbdata.glyphs
- if glyphs then
- if trace_loading then
- logs.report("load afm","getting index data from %s",pfbname)
- end
- -- local offset = (glyphs[0] and glyphs[0] != .notdef) or 0
- for index, glyph in next, glyphs do
- local name = glyph.name
- if name then
- local char = characters[name]
- if char then
- if trace_indexing then
- logs.report("load afm","glyph %s has index %s",name,index)
- end
- char.index = index
+local function get_indexes(data,pfbname)
+ data.luatex.filename = pfbname
+ local pfbblob = fontloader.open(pfbname)
+ if pfbblob then
+ local characters = data.characters
+ local pfbdata = fontloader.to_table(pfbblob)
+ --~ print(table.serialize(pfbdata))
+ if pfbdata then
+ local glyphs = pfbdata.glyphs
+ if glyphs then
+ if trace_loading then
+ report_afm("getting index data from %s",pfbname)
+ end
+ -- local offset = (glyphs[0] and glyphs[0] != .notdef) or 0
+ for index, glyph in next, glyphs do
+ local name = glyph.name
+ if name then
+ local char = characters[name]
+ if char then
+ if trace_indexing then
+ report_afm("glyph %s has index %s",name,index)
end
+ char.index = index
end
end
- elseif trace_loading then
- logs.report("load afm","no glyph data in pfb file %s",pfbname)
end
elseif trace_loading then
- logs.report("load afm","no data in pfb file %s",pfbname)
+ report_afm("no glyph data in pfb file %s",pfbname)
end
- fontloader.close(pfbblob)
elseif trace_loading then
- logs.report("load afm","invalid pfb file %s",pfbname)
+ report_afm("no data in pfb file %s",pfbname)
end
+ fontloader.close(pfbblob)
elseif trace_loading then
- logs.report("load afm","no pfb file for %s",filename)
+ report_afm("invalid pfb file %s",pfbname)
end
end
@@ -223,21 +216,21 @@ function afm.read_afm(filename)
}
afmblob = gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics", function(charmetrics)
if trace_loading then
- logs.report("load afm","loading char metrics")
+ report_afm("loading char metrics")
end
get_charmetrics(data,charmetrics,vector)
return ""
end)
afmblob = gsub(afmblob,"StartKernPairs(.-)EndKernPairs", function(kernpairs)
if trace_loading then
- logs.report("load afm","loading kern pairs")
+ report_afm("loading kern pairs")
end
get_kernpairs(data,kernpairs)
return ""
end)
afmblob = gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics", function(version,fontmetrics)
if trace_loading then
- logs.report("load afm","loading variables")
+ report_afm("loading variables")
end
data.afmversion = version
get_variables(data,fontmetrics)
@@ -245,11 +238,10 @@ function afm.read_afm(filename)
return ""
end)
data.luatex = { }
- get_indexes(data,filename)
return data
else
if trace_loading then
- logs.report("load afm","no valid afm file %s",filename)
+ report_afm("no valid afm file %s",filename)
end
return nil
end
@@ -266,30 +258,51 @@ function afm.load(filename)
filename = resolvers.find_file(filename,'afm') or ""
if filename ~= "" then
local name = file.removesuffix(file.basename(filename))
- local data = containers.read(afm.cache(),name)
- local size = lfs.attributes(filename,"size") or 0
- if not data or data.verbose ~= fonts.verbose or data.size ~= size then
- logs.report("load afm", "reading %s",filename)
+ local data = containers.read(afm.cache,name)
+ local attr = lfs.attributes(filename)
+ local size, time = attr.size or 0, attr.modification or 0
+ --
+ local pfbfile = file.replacesuffix(name,"pfb")
+ local pfbname = resolvers.find_file(pfbfile,"pfb") or ""
+ if pfbname == "" then
+ pfbname = resolvers.find_file(file.basename(pfbfile),"pfb") or ""
+ end
+ local pfbsize, pfbtime = 0, 0
+ if pfbname ~= "" then
+ local attr = lfs.attributes(pfbname)
+ pfbsize, pfbtime = attr.size or 0, attr.modification or 0
+ end
+ if not data or data.verbose ~= fonts.verbose
+ or data.size ~= size or data.time ~= time or data.pfbsize ~= pfbsize or data.pfbtime ~= pfbtime then
+ report_afm( "reading %s",filename)
data = afm.read_afm(filename)
if data then
-- data.luatex = data.luatex or { }
- logs.report("load afm", "unifying %s",filename)
+ if pfbname ~= "" then
+ get_indexes(data,pfbname)
+ elseif trace_loading then
+ report_afm("no pfb file for %s",filename)
+ end
+ report_afm( "unifying %s",filename)
afm.unify(data,filename)
if afm.enhance_data then
- logs.report("load afm", "add ligatures")
+ report_afm( "add ligatures")
afm.add_ligatures(data,'ligatures') -- easier this way
- logs.report("load afm", "add tex-ligatures")
+ report_afm( "add tex-ligatures")
afm.add_ligatures(data,'texligatures') -- easier this way
- logs.report("load afm", "add extra kerns")
+ report_afm( "add extra kerns")
afm.add_kerns(data) -- faster this way
end
- logs.report("load afm", "add tounicode data")
+ report_afm( "add tounicode data")
fonts.map.add_to_unicode(data,filename)
data.size = size
+ data.time = time
+ data.pfbsize = pfbsize
+ data.pfbtime = pfbtime
data.verbose = fonts.verbose
- logs.report("load afm","saving: %s in cache",name)
- data = containers.write(afm.cache(), name, data)
- data = containers.read(afm.cache(),name)
+ report_afm("saving: %s in cache",name)
+ data = containers.write(afm.cache, name, data)
+ data = containers.read(afm.cache,name)
end
end
return data
@@ -310,7 +323,7 @@ function afm.unify(data, filename)
if not code then
code = private
private = private + 1
- logs.report("afm glyph", "assigning private slot U+%04X for unknown glyph name %s", code, name)
+ report_afm("assigning private slot U+%04X for unknown glyph name %s", code, name)
end
end
local index = blob.index
@@ -476,7 +489,7 @@ function afm.copy_to_tfm(data)
local filename = fonts.tfm.checked_filename(luatex) -- was metadata.filename
local fontname = metadata.fontname or metadata.fullname
local fullname = metadata.fullname or metadata.fontname
- local endash, emdash, space, spaceunits = unicodes['space'], unicodes['emdash'], "space", 500
+ local endash, emdash, spacer, spaceunits = unicodes['space'], unicodes['emdash'], "space", 500
-- same as otf
if metadata.isfixedpitch then
if descriptions[endash] then
@@ -607,7 +620,7 @@ function afm.set_features(tfmdata)
local value = features[f]
if value and fiafm[f] then -- brr
if trace_features then
- logs.report("define afm","initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown',tfmdata.name or 'unknown')
+ report_afm("initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown',tfmdata.name or 'unknown')
end
fiafm[f](tfmdata,value)
mode = tfmdata.mode or fonts.mode
@@ -619,7 +632,7 @@ function afm.set_features(tfmdata)
end
local fm = fonts.methods[mode]
local fmafm = fm and fm.afm
- if fmfm then
+ if fmafm then
local lists = {
afm.features.list,
}
@@ -656,13 +669,13 @@ function afm.afm_to_tfm(specification)
local afmname = specification.filename or specification.name
if specification.forced == "afm" or specification.format == "afm" then -- move this one up
if trace_loading then
- logs.report("load afm","forcing afm format for %s",afmname)
+ report_afm("forcing afm format for %s",afmname)
end
else
local tfmname = resolvers.findbinfile(afmname,"ofm") or ""
if tfmname ~= "" then
if trace_loading then
- logs.report("load afm","fallback from afm to tfm for %s",afmname)
+ report_afm("fallback from afm to tfm for %s",afmname)
end
afmname = ""
end
@@ -674,7 +687,7 @@ function afm.afm_to_tfm(specification)
specification = fonts.define.resolve(specification) -- new, was forgotten
local features = specification.features.normal
local cache_id = specification.hash
- local tfmdata = containers.read(tfm.cache(), cache_id) -- cache with features applied
+ local tfmdata = containers.read(tfm.cache, cache_id) -- cache with features applied
if not tfmdata then
local afmdata = afm.load(afmname)
if afmdata and next(afmdata) then
@@ -688,9 +701,9 @@ function afm.afm_to_tfm(specification)
afm.set_features(tfmdata)
end
elseif trace_loading then
- logs.report("load afm","no (valid) afm file found with name %s",afmname)
+ report_afm("no (valid) afm file found with name %s",afmname)
end
- tfmdata = containers.write(tfm.cache(),cache_id,tfmdata)
+ tfmdata = containers.write(tfm.cache,cache_id,tfmdata)
end
return tfmdata
end
@@ -720,18 +733,6 @@ function tfm.read_from_afm(specification)
tfmtable.name = specification.name
tfmtable = tfm.scale(tfmtable, specification.size, specification.relativeid)
local afmdata = tfmtable.shared.afmdata
---~ local filename = afmdata and afmdata.luatex and afmdata.luatex.filename
---~ if filename then
---~ tfmtable.encodingbytes = 2
---~ tfmtable.filename = resolvers.findbinfile(filename,"") or filename
---~ tfmtable.fontname = afmdata.metadata.fontname or afmdata.metadata.fullname
---~ tfmtable.fullname = afmdata.metadata.fullname or afmdata.metadata.fontname
---~ tfmtable.format = 'type1'
---~ tfmtable.name = afmdata.luatex.filename or tfmtable.fullname
---~ end
- if fonts.dontembed[filename] then
- tfmtable.file = nil -- or filename ?
- end
fonts.logger.save(tfmtable,'afm',specification)
end
return tfmtable
diff --git a/tex/context/base/font-chk.lua b/tex/context/base/font-chk.lua
index 32fdf8894..dc13a4aee 100644
--- a/tex/context/base/font-chk.lua
+++ b/tex/context/base/font-chk.lua
@@ -8,6 +8,8 @@ if not modules then modules = { } end modules ['font-chk'] = {
-- possible optimization: delayed initialization of vectors
+local report_fonts = logs.new("fonts")
+
fonts = fonts or { }
fonts.checkers = fonts.checkers or { }
@@ -40,7 +42,7 @@ function fonts.register_message(font,char,message)
messages[message] = category
end
if not category[char] then
- logs.report("fonts","char U+%04X in font '%s' with id %s: %s",char,tfmdata.fullname,font,message)
+ report_fonts("char U+%04X in font '%s' with id %s: %s",char,tfmdata.fullname,font,message)
category[char] = true
end
end
diff --git a/tex/context/base/font-cid.lua b/tex/context/base/font-cid.lua
index d1c727af2..84d676782 100644
--- a/tex/context/base/font-cid.lua
+++ b/tex/context/base/font-cid.lua
@@ -12,6 +12,8 @@ local lpegmatch = lpeg.match
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local report_otf = logs.new("load otf")
+
fonts = fonts or { }
fonts.cid = fonts.cid or { }
fonts.cid.map = fonts.cid.map or { }
@@ -86,14 +88,14 @@ local function locate(registry,ordering,supplement)
local cidmap = fonts.cid.map[hashname]
if not cidmap then
if trace_loading then
- logs.report("load otf","checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
+ report_otf("checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
end
local fullname = resolvers.find_file(filename,'cid') or ""
if fullname ~= "" then
cidmap = fonts.cid.load(fullname)
if cidmap then
if trace_loading then
- logs.report("load otf","using cidmap file %s",filename)
+ report_otf("using cidmap file %s",filename)
end
fonts.cid.map[hashname] = cidmap
cidmap.usedname = file.basename(filename)
@@ -108,7 +110,7 @@ function fonts.cid.getmap(registry,ordering,supplement)
-- cf Arthur R. we can safely scan upwards since cids are downward compatible
local supplement = tonumber(supplement)
if trace_loading then
- logs.report("load otf","needed cidmap, registry: %s, ordering: %s, supplement: %s",registry,ordering,supplement)
+ report_otf("needed cidmap, registry: %s, ordering: %s, supplement: %s",registry,ordering,supplement)
end
local cidmap = locate(registry,ordering,supplement)
if not cidmap then
diff --git a/tex/context/base/font-clr.lua b/tex/context/base/font-clr.lua
new file mode 100644
index 000000000..ef98c2f06
--- /dev/null
+++ b/tex/context/base/font-clr.lua
@@ -0,0 +1,30 @@
+if not modules then modules = { } end modules ['font-clr'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- moved from ini:
+
+fonts.color = fonts.color or { } -- dummy in ini
+
+local set_attribute = node.set_attribute
+local unset_attribute = node.unset_attribute
+
+local attribute = attributes.private('color')
+local mapping = attributes and attributes.list[attribute] or { }
+
+function fonts.color.set(n,c)
+ local mc = mapping[c]
+ if not mc then
+ unset_attribute(n,attribute)
+ else
+ set_attribute(n,attribute,mc)
+ end
+end
+
+function fonts.color.reset(n)
+ unset_attribute(n,attribute)
+end
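
The new font-clr.lua only relocates the fonts.color helpers that are removed from font-ctx.lua further down in this patch: a color name is looked up in the registered attribute list and the glyph node is tagged or untagged accordingly. A toy model of that behaviour in plain Lua, with tables standing in for nodes and for attributes.list (all names illustrative):

local mapping = { red = 1, blue = 2 }   -- plays the role of attributes.list[attribute]

local color = { }

function color.set(n,c)
    local mc = mapping[c]
    if not mc then
        n.color = nil                   -- unknown name: clear, like unset_attribute
    else
        n.color = mc                    -- known name: tag the node with its attribute value
    end
end

function color.reset(n)
    n.color = nil
end

local glyph = { char = 65 }
color.set(glyph,"red")                  -- glyph.color == 1
color.reset(glyph)                      -- glyph.color == nil
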
diff --git a/tex/context/base/font-col.lua b/tex/context/base/font-col.lua
index d313357a2..af72ff0da 100644
--- a/tex/context/base/font-col.lua
+++ b/tex/context/base/font-col.lua
@@ -16,6 +16,8 @@ local ctxcatcodes = tex.ctxcatcodes
local trace_collecting = false trackers.register("fonts.collecting", function(v) trace_collecting = v end)
+local report_fonts = logs.new("fonts")
+
local fontdata = fonts.ids
local glyph = node.id('glyph')
@@ -54,11 +56,11 @@ function collections.define(name,font,ranges,details)
local d = definitions[name]
if d then
if name and trace_collecting then
- logs.report("fonts","def: extending set %s using %s",name, font)
+ report_fonts("def: extending set %s using %s",name, font)
end
else
if name and trace_collecting then
- logs.report("fonts","def: defining set %s using %s",name, font)
+ report_fonts("def: defining set %s using %s",name, font)
end
d = { }
definitions[name] = d
@@ -70,12 +72,12 @@ function collections.define(name,font,ranges,details)
if start and stop then
if trace_collecting then
if description then
- logs.report("fonts","def: using range %s (U+%04x-U+%04X, %s)",s,start,stop,description)
+ report_fonts("def: using range %s (U+%04x-U+%04X, %s)",s,start,stop,description)
end
for i=1,#d do
local di = d[i]
if (start >= di.start and start <= di.stop) or (stop >= di.start and stop <= di.stop) then
- logs.report("fonts","def: overlapping ranges U+%04x-U+%04X and U+%04x-U+%04X",start,stop,di.start,di.stop)
+ report_fonts("def: overlapping ranges U+%04x-U+%04X and U+%04x-U+%04X",start,stop,di.start,di.stop)
end
end
end
@@ -88,7 +90,7 @@ end
function collections.stage_1(name)
local last = font.current()
if trace_collecting then
- logs.report("fonts","def: registering font %s with name %s",last,name)
+ report_fonts("def: registering font %s with name %s",last,name)
end
list[#list+1] = last
end
@@ -98,14 +100,14 @@ function collections.stage_2(name)
local d = definitions[name]
local t = { }
if trace_collecting then
- logs.report("fonts","def: process collection %s",name)
+ report_fonts("def: process collection %s",name)
end
for i=1,#d do
local f = d[i]
local id = list[i]
local start, stop = f.start, f.stop
if trace_collecting then
- logs.report("fonts","def: remapping font %s to %s for range U+%04X - U+%04X",current,id,start,stop)
+ report_fonts("def: remapping font %s to %s for range U+%04X - U+%04X",current,id,start,stop)
end
local check = toboolean(f.check or "false",true)
local force = toboolean(f.force or "true",true)
@@ -138,7 +140,7 @@ function collections.stage_2(name)
end
vectors[current] = t
if trace_collecting then
- logs.report("fonts","def: activating collection %s for font %s",name,current)
+ report_fonts("def: activating collection %s for font %s",name,current)
end
active = true
statistics.stoptiming(fonts)
@@ -159,7 +161,7 @@ function collections.prepare(name)
if d then
if trace_collecting then
local filename = file.basename(fontdata[current].filename or "?")
- logs.report("fonts","def: applying collection %s to %s (file: %s)",name,current,filename)
+ report_fonts("def: applying collection %s to %s (file: %s)",name,current,filename)
end
list = { }
texsprint(ctxcatcodes,"\\dostartcloningfonts") -- move this to tex \dostart...
@@ -178,13 +180,13 @@ function collections.prepare(name)
texsprint(ctxcatcodes,"\\dostopcloningfonts")
elseif trace_collecting then
local filename = file.basename(fontdata[current].filename or "?")
- logs.report("fonts","def: error in applying collection %s to %s (file: %s)",name,current,filename)
+ report_fonts("def: error in applying collection %s to %s (file: %s)",name,current,filename)
end
end
function collections.message(message)
if trace_collecting then
- logs.report("fonts","tex: %s",message)
+ report_fonts("tex: %s",message)
end
end
@@ -199,18 +201,20 @@ function collections.process(head)
if type(id) == "table" then
local newid, newchar = id[1], id[2]
if trace_collecting then
- logs.report("fonts","lst: remapping character %s in font %s to character %s in font %s",n.char,n.font,newchar,newid)
+ report_fonts("lst: remapping character %s in font %s to character %s in font %s",n.char,n.font,newchar,newid)
end
n.font, n.char = newid, newchar
else
if trace_collecting then
- logs.report("fonts","lst: remapping font %s to %s for character %s",n.font,id,n.char)
+ report_fonts("lst: remapping font %s to %s for character %s",n.font,id,n.char)
end
n.font = id
end
end
end
end
+ return head, done
+ else
+ return head, false
end
- return head, done
end
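
This hunk, like the colorschemes.coloring change in font-gds.lua below, enforces the usual node-processor contract: hand back the (possibly new) head plus a boolean that says whether anything was changed, on every code path. A plain-Lua sketch of that contract, with arrays standing in for node lists and hypothetical names:

local function process(head,remap)
    local done = false
    for i=1,#head do                    -- the real code traverses glyph nodes
        local new = remap[head[i]]
        if new then
            head[i], done = new, true
        end
    end
    return head, done                   -- both values, changed or not
end

print(process({ 1, 2, 3 },{ [2] = 22 })) -- table, true
print(process({ 1, 2, 3 },{ }))          -- table, false
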
diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua
index 76e9f095a..71d870559 100644
--- a/tex/context/base/font-ctx.lua
+++ b/tex/context/base/font-ctx.lua
@@ -19,6 +19,8 @@ local ctxcatcodes = tex.ctxcatcodes
local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+local report_define = logs.new("define fonts")
+
local tfm = fonts.tfm
local define = fonts.define
local fontdata = fonts.identifiers
@@ -302,7 +304,7 @@ function define.command_1(str)
local fullname, size = lpegmatch(splitpattern,str)
local lookup, name, sub, method, detail = get_specification(fullname)
if not name then
- logs.report("define font","strange definition '%s'",str)
+ report_define("strange definition '%s'",str)
texsprint(ctxcatcodes,"\\fcglet\\somefontname\\defaultfontfile")
elseif name == "unknown" then
texsprint(ctxcatcodes,"\\fcglet\\somefontname\\defaultfontfile")
@@ -336,7 +338,7 @@ local n = 0
function define.command_2(global,cs,str,size,classfeatures,fontfeatures,classfallbacks,fontfallbacks,mathsize,textsize,relativeid)
if trace_defining then
- logs.report("define font","memory usage before: %s",statistics.memused())
+ report_define("memory usage before: %s",statistics.memused())
end
-- name is now resolved and size is scaled cf sa/mo
local lookup, name, sub, method, detail = get_specification(str or "")
@@ -369,11 +371,11 @@ function define.command_2(global,cs,str,size,classfeatures,fontfeatures,classfal
end
local tfmdata = define.read(specification,size) -- id not yet known
if not tfmdata then
- logs.report("define font","unable to define %s as \\%s",name,cs)
+ report_define("unable to define %s as \\%s",name,cs)
texsetcount("global","lastfontid",-1)
elseif type(tfmdata) == "number" then
if trace_defining then
- logs.report("define font","reusing %s with id %s as \\%s (features: %s/%s, fallbacks: %s/%s)",name,tfmdata,cs,classfeatures,fontfeatures,classfallbacks,fontfallbacks)
+ report_define("reusing %s with id %s as \\%s (features: %s/%s, fallbacks: %s/%s)",name,tfmdata,cs,classfeatures,fontfeatures,classfallbacks,fontfallbacks)
end
tex.definefont(global,cs,tfmdata)
-- resolved (when designsize is used):
@@ -388,7 +390,7 @@ function define.command_2(global,cs,str,size,classfeatures,fontfeatures,classfal
tex.definefont(global,cs,id)
tfm.cleanup_table(tfmdata)
if trace_defining then
- logs.report("define font","defining %s with id %s as \\%s (features: %s/%s, fallbacks: %s/%s)",name,id,cs,classfeatures,fontfeatures,classfallbacks,fontfallbacks)
+ report_define("defining %s with id %s as \\%s (features: %s/%s, fallbacks: %s/%s)",name,id,cs,classfeatures,fontfeatures,classfallbacks,fontfallbacks)
end
-- resolved (when designsize is used):
texsprint(ctxcatcodes,format("\\def\\somefontsize{%isp}",tfmdata.size))
@@ -398,7 +400,7 @@ function define.command_2(global,cs,str,size,classfeatures,fontfeatures,classfal
texsetcount("global","lastfontid",id)
end
if trace_defining then
- logs.report("define font","memory usage after: %s",statistics.memused())
+ report_define("memory usage after: %s",statistics.memused())
end
statistics.stoptiming(fonts)
end
@@ -468,7 +470,7 @@ end
-- for the moment here, this will become a chain of extras that is
-- hooked into the ctx registration (or scaler or ...)
-function fonts.set_digit_width(font)
+function fonts.set_digit_width(font) -- max(quad/2,wd(0..9))
local tfmtable = fontdata[font]
local parameters = tfmtable.parameters
local width = parameters.digitwidth
@@ -560,29 +562,6 @@ function fonts.char(n) -- todo: afm en tfm
end
end
--- moved from ini:
-
-fonts.color = { } -- dummy in ini
-
-local attribute = attributes.private('color')
-local mapping = (attributes and attributes.list[attribute]) or { }
-
-local set_attribute = node.set_attribute
-local unset_attribute = node.unset_attribute
-
-function fonts.color.set(n,c)
- local mc = mapping[c]
- if not mc then
- unset_attribute(n,attribute)
- else
- set_attribute(n,attribute,mc)
- end
-end
-
-function fonts.color.reset(n)
- unset_attribute(n,attribute)
-end
-
-- this will become obsolete:
fonts.otf.name_to_slot = name_to_slot
@@ -622,3 +601,4 @@ function fonts.show_font_parameters()
end
end
end
+
diff --git a/tex/context/base/font-def.lua b/tex/context/base/font-def.lua
index c3b10162c..a35c4856f 100644
--- a/tex/context/base/font-def.lua
+++ b/tex/context/base/font-def.lua
@@ -16,6 +16,9 @@ local directive_embedall = false directives.register("fonts.embedall", function
trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading")
trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*")
+local report_define = logs.new("define fonts")
+local report_afm = logs.new("load afm")
+
--[[ldx--
<p>Here we deal with defining fonts. We do so by intercepting the
default loader that only handles <l n='tfm'/>.</p>
@@ -120,7 +123,7 @@ end
function define.makespecification(specification, lookup, name, sub, method, detail, size)
size = size or 655360
if trace_defining then
- logs.report("define font","%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
+ report_define("%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
specification, (lookup ~= "" and lookup) or "[file]", (name ~= "" and name) or "-",
(sub ~= "" and sub) or "-", (method ~= "" and method) or "-", (detail ~= "" and detail) or "-")
end
@@ -233,18 +236,29 @@ end
define.resolvers = resolvers
+-- todo: reporter
+
function define.resolvers.file(specification)
- specification.forced = file.extname(specification.name)
- specification.name = file.removesuffix(specification.name)
+ local suffix = file.suffix(specification.name)
+ if fonts.formats[suffix] then
+ specification.forced = suffix
+ specification.name = file.removesuffix(specification.name)
+ end
end
function define.resolvers.name(specification)
local resolve = fonts.names.resolve
if resolve then
- specification.resolved, specification.sub = fonts.names.resolve(specification.name,specification.sub)
- if specification.resolved then
- specification.forced = file.extname(specification.resolved)
- specification.name = file.removesuffix(specification.resolved)
+ local resolved, sub = fonts.names.resolve(specification.name,specification.sub)
+ specification.resolved, specification.sub = resolved, sub
+ if resolved then
+ local suffix = file.suffix(resolved)
+ if fonts.formats[suffix] then
+ specification.forced = suffix
+ specification.name = file.removesuffix(resolved)
+ else
+ specification.name = resolved
+ end
end
else
define.resolvers.file(specification)
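
The resolver changes above only strip a suffix and treat it as a forced format when it names a known font format (a key in fonts.formats), so dots elsewhere in a font name or a resolved file name are left alone. A standalone sketch of that rule; the formats table and the helper are illustrative, not the ConTeXt originals:

local formats = { afm = true, tfm = true, otf = true, ttf = true, pfb = true }

local function split_forced(name)
    local base, suffix = name:match("^(.+)%.([^.]+)$")
    if base and formats[suffix:lower()] then
        return base, suffix:lower()     -- "lmroman10-regular.otf" -> base plus forced "otf"
    end
    return name, nil                    -- "adobe-garamond.pro" keeps its dot, nothing forced
end

print(split_forced("lmroman10-regular.otf")) -- lmroman10-regular  otf
print(split_forced("adobe-garamond.pro"))    -- adobe-garamond.pro nil
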
@@ -307,14 +321,14 @@ function tfm.read(specification)
if forced ~= "" then
tfmtable = readers[lower(forced)](specification)
if not tfmtable then
- logs.report("define font","forced type %s of %s not found",forced,specification.name)
+ report_define("forced type %s of %s not found",forced,specification.name)
end
else
for s=1,#sequence do -- reader sequence
local reader = sequence[s]
if readers[reader] then -- not really needed
if trace_defining then
- logs.report("define font","trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
+ report_define("trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
end
tfmtable = readers[reader](specification)
if tfmtable then
@@ -339,7 +353,7 @@ function tfm.read(specification)
end
end
if not tfmtable then
- logs.report("define font","font with name %s is not found",specification.name)
+ report_define("font with name %s is not found",specification.name)
end
return tfmtable
end
@@ -399,7 +413,7 @@ local function check_afm(specification,fullname)
foundname = shortname
-- tfm.set_normal_feature(specification,'encoding',encoding) -- will go away
if trace_loading then
- logs.report("load afm","stripping encoding prefix from filename %s",afmname)
+ report_afm("stripping encoding prefix from filename %s",afmname)
end
end
end
@@ -456,7 +470,7 @@ end
local function check_otf(forced,specification,suffix,what)
local name = specification.name
if forced then
- name = file.addsuffix(name,suffix)
+ name = file.addsuffix(name,suffix,true)
end
local fullname, tfmtable = resolvers.findbinfile(name,suffix) or "", nil -- one shot
if fullname == "" then
@@ -532,7 +546,7 @@ function define.register(fontdata,id)
local hash = fontdata.hash
if not tfm.internalized[hash] then
if trace_defining then
- logs.report("define font","loading at 2 id %s, hash: %s",id or "?",hash or "?")
+ report_define("loading at 2 id %s, hash: %s",id or "?",hash or "?")
end
fonts.identifiers[id] = fontdata
fonts.characters [id] = fontdata.characters
@@ -578,7 +592,7 @@ function define.read(specification,size,id) -- id can be optional, name can alre
specification = define.resolve(specification)
local hash = tfm.hash_instance(specification)
if cache_them then
- local fontdata = containers.read(fonts.cache(),hash) -- for tracing purposes
+ local fontdata = containers.read(fonts.cache,hash) -- for tracing purposes
end
local fontdata = define.registered(hash) -- id
if not fontdata then
@@ -591,7 +605,7 @@ function define.read(specification,size,id) -- id can be optional, name can alre
end
end
if cache_them then
- fontdata = containers.write(fonts.cache(),hash,fontdata) -- for tracing purposes
+ fontdata = containers.write(fonts.cache,hash,fontdata) -- for tracing purposes
end
if fontdata then
fontdata.hash = hash
@@ -603,9 +617,9 @@ function define.read(specification,size,id) -- id can be optional, name can alre
end
define.last = fontdata or id -- todo ! ! ! ! !
if not fontdata then
- logs.report("define font", "unknown font %s, loading aborted",specification.name)
+ report_define( "unknown font %s, loading aborted",specification.name)
elseif trace_defining and type(fontdata) == "table" then
- logs.report("define font","using %s font with id %s, name:%s size:%s bytes:%s encoding:%s fullname:%s filename:%s",
+ report_define("using %s font with id %s, name:%s size:%s bytes:%s encoding:%s fullname:%s filename:%s",
fontdata.type or "unknown",
id or "?",
fontdata.name or "?",
@@ -626,18 +640,18 @@ function vf.find(name)
local format = fonts.logger.format(name)
if format == 'tfm' or format == 'ofm' then
if trace_defining then
- logs.report("define font","locating vf for %s",name)
+ report_define("locating vf for %s",name)
end
return resolvers.findbinfile(name,"ovf")
else
if trace_defining then
- logs.report("define font","vf for %s is already taken care of",name)
+ report_define("vf for %s is already taken care of",name)
end
return nil -- ""
end
else
if trace_defining then
- logs.report("define font","locating vf for %s",name)
+ report_define("locating vf for %s",name)
end
return resolvers.findbinfile(name,"ovf")
end
diff --git a/tex/context/base/font-enc.lua b/tex/context/base/font-enc.lua
index 874f7c3f4..71dea4327 100644
--- a/tex/context/base/font-enc.lua
+++ b/tex/context/base/font-enc.lua
@@ -13,11 +13,14 @@ local match, gmatch, gsub = string.match, string.gmatch, string.gsub
them in tables. But we may do so some day, for consistency.</p>
--ldx]]--
-fonts.enc = fonts.enc or { }
-fonts.enc.version = 1.03
-fonts.enc.cache = containers.define("fonts", "enc", fonts.enc.version, true)
+fonts.enc = fonts.enc or { }
-fonts.enc.known = { -- sort of obsolete
+local enc = fonts.enc
+
+enc.version = 1.03
+enc.cache = containers.define("fonts", "enc", fonts.enc.version, true)
+
+enc.known = { -- sort of obsolete
texnansi = true,
ec = true,
qx = true,
@@ -28,8 +31,8 @@ fonts.enc.known = { -- sort of obsolete
unicode = true
}
-function fonts.enc.is_known(encoding)
- return containers.is_valid(fonts.enc.cache(),encoding)
+function enc.is_known(encoding)
+ return containers.is_valid(enc.cache,encoding)
end
--[[ldx--
@@ -51,14 +54,14 @@ Latin Modern or <l n='tex'> Gyre) come in OpenType variants too, so these
will be used.</p>
--ldx]]--
-function fonts.enc.load(filename)
+function enc.load(filename)
local name = file.removesuffix(filename)
- local data = containers.read(fonts.enc.cache(),name)
+ local data = containers.read(enc.cache,name)
if data then
return data
end
if name == "unicode" then
- data = fonts.enc.make_unicode_vector() -- special case, no tex file for this
+ data = enc.make_unicode_vector() -- special case, no tex file for this
end
if data then
return data
@@ -95,7 +98,7 @@ function fonts.enc.load(filename)
hash=hash,
unicodes=unicodes
}
- return containers.write(fonts.enc.cache(), name, data)
+ return containers.write(enc.cache, name, data)
end
--[[ldx--
@@ -105,7 +108,7 @@ one.</p>
-- maybe make this a function:
-function fonts.enc.make_unicode_vector()
+function enc.make_unicode_vector()
local vector, hash = { }, { }
for code, v in next, characters.data do
local name = v.adobename
@@ -118,5 +121,5 @@ function fonts.enc.make_unicode_vector()
for name, code in next, characters.synonyms do
vector[code], hash[name] = name, code
end
- return containers.write(fonts.enc.cache(), 'unicode', { name='unicode', tag='unicode', vector=vector, hash=hash })
+ return containers.write(enc.cache, 'unicode', { name='unicode', tag='unicode', vector=vector, hash=hash })
end
diff --git a/tex/context/base/font-enh.lua b/tex/context/base/font-enh.lua
index fc70c04c5..137044f5b 100644
--- a/tex/context/base/font-enh.lua
+++ b/tex/context/base/font-enh.lua
@@ -10,6 +10,8 @@ local next, match = next, string.match
local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+local report_define = logs.new("define fonts")
+
-- tfmdata has also fast access to indices and unicodes
-- to be checked: otf -> tfm -> tfmscaled
--
@@ -76,7 +78,7 @@ function tfm.set_features(tfmdata)
local value = features[f]
if value and fi.tfm[f] then -- brr
if tfm.trace_features then
- logs.report("define font","initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown',tfmdata.name or 'unknown')
+ report_define("initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown',tfmdata.name or 'unknown')
end
fi.tfm[f](tfmdata,value)
mode = tfmdata.mode or fonts.mode
@@ -127,7 +129,7 @@ function tfm.reencode(tfmdata,encoding)
for k,v in next, data.unicodes do
if k ~= v then
if trace_defining then
- logs.report("define font","reencoding U+%04X to U+%04X",k,v)
+ report_define("reencoding U+%04X to U+%04X",k,v)
end
characters[k] = original[v]
end
@@ -154,7 +156,7 @@ function tfm.remap(tfmdata,remapping)
for k,v in next, vector do
if k ~= v then
if trace_defining then
- logs.report("define font","remapping U+%04X to U+%04X",k,v)
+ report_define("remapping U+%04X to U+%04X",k,v)
end
local c = original[k]
characters[v] = c
@@ -191,7 +193,7 @@ fonts.initializers.node.tfm.remap = tfm.remap
--~ for k,v in next, data.unicodes do
--~ if k ~= v then
--~ if trace_defining then
---~ logs.report("define font","mapping %s onto %s",k,v)
+--~ report_define("mapping %s onto %s",k,v)
--~ end
--~ characters[k] = original[v]
--~ end
diff --git a/tex/context/base/font-ext.lua b/tex/context/base/font-ext.lua
index 05bdaf2fc..00fdc26a1 100644
--- a/tex/context/base/font-ext.lua
+++ b/tex/context/base/font-ext.lua
@@ -14,6 +14,8 @@ local utfchar = utf.char
local trace_protrusion = false trackers.register("fonts.protrusion", function(v) trace_protrusion = v end)
local trace_expansion = false trackers.register("fonts.expansion", function(v) trace_expansion = v end)
+local report_fonts = logs.new("fonts")
+
commands = commands or { }
--[[ldx--
@@ -168,7 +170,7 @@ function initializers.common.expansion(tfmdata,value)
if vector then
local stretch, shrink, step, factor = class.stretch or 0, class.shrink or 0, class.step or 0, class.factor or 1
if trace_expansion then
- logs.report("fonts","set expansion class %s, vector: %s, factor: %s, stretch: %s, shrink: %s, step: %s",value,class_vector,factor,stretch,shrink,step)
+ report_fonts("set expansion class %s, vector: %s, factor: %s, stretch: %s, shrink: %s, step: %s",value,class_vector,factor,stretch,shrink,step)
end
tfmdata.stretch, tfmdata.shrink, tfmdata.step, tfmdata.auto_expand = stretch * 10, shrink * 10, step * 10, true
local data = characters and characters.data
@@ -194,10 +196,10 @@ function initializers.common.expansion(tfmdata,value)
end
end
elseif trace_expansion then
- logs.report("fonts","unknown expansion vector '%s' in class '%s",class_vector,value)
+ report_fonts("unknown expansion vector '%s' in class '%s",class_vector,value)
end
elseif trace_expansion then
- logs.report("fonts","unknown expansion class '%s'",value)
+ report_fonts("unknown expansion class '%s'",value)
end
end
end
@@ -212,6 +214,8 @@ initializers.node.afm.expansion = initializers.common.expansion
fonts.goodies.register("expansions", function(...) return fonts.goodies.report("expansions", trace_expansion, ...) end)
+local report_opbd = logs.new("otf opbd")
+
-- -- -- -- -- --
-- protrusion
-- -- -- -- -- --
@@ -381,14 +385,14 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd)
local data = singles[lookup]
if data then
if trace_protrusion then
- logs.report("fonts","set left protrusion using lfbd lookup '%s'",lookup)
+ report_fonts("set left protrusion using lfbd lookup '%s'",lookup)
end
for k, v in next, data do
-- local p = - v[3] / descriptions[k].width-- or 1 ~= 0 too but the same
local p = - (v[1] / 1000) * factor * left
characters[k].left_protruding = p
if trace_protrusion then
- logs.report("opbd","lfbd -> %s -> 0x%05X (%s) -> %0.03f (%s)",lookup,k,utfchar(k),p,concat(v," "))
+ report_protrusions("lfbd -> %s -> 0x%05X (%s) -> %0.03f (%s)",lookup,k,utfchar(k),p,concat(v," "))
end
end
done = true
@@ -404,14 +408,14 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd)
local data = singles[lookup]
if data then
if trace_protrusion then
- logs.report("fonts","set right protrusion using rtbd lookup '%s'",lookup)
+ report_fonts("set right protrusion using rtbd lookup '%s'",lookup)
end
for k, v in next, data do
-- local p = v[3] / descriptions[k].width -- or 3
local p = (v[1] / 1000) * factor * right
characters[k].right_protruding = p
if trace_protrusion then
- logs.report("opbd","rtbd -> %s -> 0x%05X (%s) -> %0.03f (%s)",lookup,k,utfchar(k),p,concat(v," "))
+ report_protrusions("rtbd -> %s -> 0x%05X (%s) -> %0.03f (%s)",lookup,k,utfchar(k),p,concat(v," "))
end
end
end
@@ -441,7 +445,7 @@ function initializers.common.protrusion(tfmdata,value)
local left = class.left or 1
local right = class.right or 1
if trace_protrusion then
- logs.report("fonts","set protrusion class %s, vector: %s, factor: %s, left: %s, right: %s",value,class_vector,factor,left,right)
+ report_fonts("set protrusion class %s, vector: %s, factor: %s, left: %s, right: %s",value,class_vector,factor,left,right)
end
local data = characters.data
local emwidth = tfmdata.parameters.quad
@@ -476,10 +480,10 @@ function initializers.common.protrusion(tfmdata,value)
end
end
elseif trace_protrusion then
- logs.report("fonts","unknown protrusion vector '%s' in class '%s",class_vector,value)
+ report_fonts("unknown protrusion vector '%s' in class '%s",class_vector,value)
end
elseif trace_protrusion then
- logs.report("fonts","unknown protrusion class '%s'",value)
+ report_fonts("unknown protrusion class '%s'",value)
end
end
end
diff --git a/tex/context/base/font-fbk.lua b/tex/context/base/font-fbk.lua
index 1ad1cc781..3341a1e72 100644
--- a/tex/context/base/font-fbk.lua
+++ b/tex/context/base/font-fbk.lua
@@ -169,7 +169,7 @@ function vf.aux.compose_characters(g) -- todo: scaling depends on call location
end
if charsacc then
local chr_t = cache[chr]
- if not cht_t then
+ if not chr_t then
chr_t = {"slot", 1, chr}
cache[chr] = chr_t
end
diff --git a/tex/context/base/font-gds.lua b/tex/context/base/font-gds.lua
index e3db8c816..e17a47ca2 100644
--- a/tex/context/base/font-gds.lua
+++ b/tex/context/base/font-gds.lua
@@ -11,6 +11,8 @@ local gmatch = string.gmatch
local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end)
+local report_fonts = logs.new("fonts")
+
-- goodies=name,colorscheme=,featureset=
--
-- goodies=auto
@@ -28,7 +30,7 @@ function fonts.goodies.report(what,trace,goodies)
if trace_goodies or trace then
local whatever = goodies[what]
if whatever then
- logs.report("fonts", "goodie '%s' found in '%s'",what,goodies.name)
+ report_fonts("goodie '%s' found in '%s'",what,goodies.name)
end
end
end
@@ -43,15 +45,15 @@ local function getgoodies(filename) -- maybe a merge is better
fullname = resolvers.find_file(file.addsuffix(filename,"lua")) or "" -- fallback suffix
end
if fullname == "" then
- logs.report("fonts", "goodie file '%s.lfg' is not found",filename)
+ report_fonts("goodie file '%s.lfg' is not found",filename)
data[filename] = false -- signal for not found
else
goodies = dofile(fullname) or false
if not goodies then
- logs.report("fonts", "goodie file '%s' is invalid",fullname)
+ report_fonts("goodie file '%s' is invalid",fullname)
return nil
elseif trace_goodies then
- logs.report("fonts", "goodie file '%s' is loaded",fullname)
+ report_fonts("goodie file '%s' is loaded",fullname)
end
goodies.name = goodies.name or "no name"
for name, fnc in next, list do
@@ -120,7 +122,7 @@ function fonts.goodies.prepare_features(goodies,name,set)
local n, s = preset_context(fullname,"",ff)
goodies.featuresets[name] = s -- set
if trace_goodies then
- logs.report("fonts", "feature set '%s' gets number %s and name '%s'",name,n,fullname)
+ report_fonts("feature set '%s' gets number %s and name '%s'",name,n,fullname)
end
return n
end
@@ -131,7 +133,7 @@ local function initialize(goodies,tfmdata)
local goodiesname = goodies.name
if featuresets then
if trace_goodies then
- logs.report("fonts", "checking featuresets in '%s'",goodies.name)
+ report_fonts("checking featuresets in '%s'",goodies.name)
end
for name, set in next, featuresets do
fonts.goodies.prepare_features(goodies,name,set)
@@ -211,6 +213,7 @@ local glyph = node.id("glyph")
function fonts.goodies.colorschemes.coloring(head)
local lastfont, lastscheme
+ local done = false
for n in traverse_id(glyph,head) do
local a = has_attribute(n,a_colorscheme)
if a then
@@ -221,11 +224,13 @@ function fonts.goodies.colorschemes.coloring(head)
if lastscheme then
local sc = lastscheme[n.char]
if sc then
+ done = true
fcs(n,"colorscheme:"..a..":"..sc) -- slow
end
end
end
end
+ return head, done
end
function fonts.goodies.colorschemes.enable()
diff --git a/tex/context/base/font-gds.mkiv b/tex/context/base/font-gds.mkiv
index e36116283..1677aaa97 100644
--- a/tex/context/base/font-gds.mkiv
+++ b/tex/context/base/font-gds.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Font Support / Colorschemes}
-% \registerctxluafile{font-gds}{1.001}
+%registerctxluafile{font-gds}{1.001}
\unprotect
diff --git a/tex/context/base/font-ini.lua b/tex/context/base/font-ini.lua
index e45149781..296af06e1 100644
--- a/tex/context/base/font-ini.lua
+++ b/tex/context/base/font-ini.lua
@@ -13,6 +13,9 @@ if not modules then modules = { } end modules ['font-ini'] = {
local utf = unicode.utf8
local format, serialize = string.format, table.serialize
local write_nl = texio.write_nl
+local lower = string.lower
+
+local report_define = logs.new("define fonts")
if not fontloader then fontloader = fontforge end
@@ -84,12 +87,12 @@ end
fonts.formats = { }
function fonts.fontformat(filename,default)
- local extname = file.extname(filename)
+ local extname = lower(file.extname(filename))
local format = fonts.formats[extname]
if format then
return format
else
- logs.report("fonts define","unable to detemine font format for '%s'",filename)
+ report_define("unable to determine font format for '%s'",filename)
return default
end
end
diff --git a/tex/context/base/font-ini.mkii b/tex/context/base/font-ini.mkii
index 89fbb5d07..6901adc4c 100644
--- a/tex/context/base/font-ini.mkii
+++ b/tex/context/base/font-ini.mkii
@@ -639,7 +639,7 @@
% \def\normalmbox
% {\dowithnextboxcontent\mf\flushnextbox\normalhbox}
-\def\mbox
+\def\mbox % we cannot add \dontleavehmode ... else no \setbox0\mbox possible
{\ifmmode\normalmbox\else\normalhbox\fi}
\def\enablembox
diff --git a/tex/context/base/font-ini.mkiv b/tex/context/base/font-ini.mkiv
index c7d515cca..a6e41d7b3 100644
--- a/tex/context/base/font-ini.mkiv
+++ b/tex/context/base/font-ini.mkiv
@@ -55,6 +55,7 @@
\writestatus{loading}{ConTeXt Font Macros / Initialization}
\registerctxluafile{font-ini}{1.001}
+\registerctxluafile{font-clr}{1.001}
\registerctxluafile{node-fnt}{1.001} % here
\registerctxluafile{font-enc}{1.001}
\registerctxluafile{font-map}{1.001}
@@ -1334,24 +1335,81 @@
\newcount\@@fontdefhack % check if this is still needed
+% \def\@@beginfontdef
+% {\ifcase\@@fontdefhack
+% \let\k!savedtext \k!text \let\k!text \s!text
+% \let\k!saveddefault \k!default \let\k!default \s!default
+% \fi
+% \advance\@@fontdefhack \plusone }
+
+% \def\@@endfontdef
+% {\advance\@@fontdefhack \minusone
+% \ifcase\@@fontdefhack
+% \let\k!default \k!saveddefault
+% \let\k!text \k!savedtext
+% \fi}
+
+%%%%%%%%%%%%%%%%%%%%%%%%
+
+% The problem is that the key in a getparameters is resolved
+% to the underlying interface language (english). But values
+% are kept as they are. However, some of the keys in a font
+% definition are used as values later on.
+%
+% The only place where this happens (for historical reasons mostly)
+% is in the bodyfont definitions and setup, so we can use a limited
+% case.
+%
+% \let \c!big \v!big : expansion time (user)
+% \let \c!small \v!small : expansion time (user)
+% \let \c!text \s!text : definition time
+% % \c!script \s!script : definition time
+% % \c!scriptscript \s!scriptscript : definition time
+% % \c!em : definition time
+% % \c!interlineskip : definition time
+% \let \c!default \s!default : definition time
+% \let \k!default \s!default : definition time
+%
+% Making the k! definitions local will save us 500 hash entries.
+
+\letvalue{\k!prefix!\v!big }\c!big
+\letvalue{\k!prefix!\v!small }\c!small
+\letvalue{\k!prefix!\v!text }\s!text
+\letvalue{\k!prefix!\v!default}\s!default
+
+\let\normalc!big \c!big
+\let\normalc!small \c!small
+\let\normalc!text \c!text % should not happen as we expect text
+\let\normalc!default \c!default
+\let\normalk!default \k!default
+
+\newtoks\everybeginfontdef
+\newtoks\everyendfontdef
+
+\appendtoks
+ \let\c!text \s!text
+ \let\c!default\s!default
+\to \everybeginfontdef
+
+\appendtoks
+ \let\c!text \normalc!text
+ \let\c!default\normalc!default
+\to \everyendfontdef
+
\def\@@beginfontdef
{\ifcase\@@fontdefhack
- \let\k!savedtext \k!text \let\k!text \s!text
- \let\k!k!savedtext \k!k!text \let\k!k!text \!!plusone
- \let\k!saveddefault \k!default \let\k!default \s!default
- \let\k!k!saveddefault\k!k!default \let\k!k!default \!!plusone
+ \the\everybeginfontdef
\fi
- \advance\@@fontdefhack \plusone }
+ \advance\@@fontdefhack\plusone}
\def\@@endfontdef
{\advance\@@fontdefhack \minusone
\ifcase\@@fontdefhack
- \let\k!k!default\k!k!saveddefault
- \let\k!default \k!saveddefault
- \let\k!k!text \k!k!savedtext
- \let\k!text \k!savedtext
+ \the\everyendfontdef
\fi}
+%%%%%%%%%%%%%%%%%%%%%%%%
+
\unexpanded\def\definebodyfontenvironment
{\dotripleempty\dodefinebodyfontenvironment}
@@ -1375,7 +1433,7 @@
\fi}
\def\dododefinebodyfontenvironment[#1][#2][#3]% size class settings
- {\@@beginfontdef % \s!text goes wrong in testing because the 12pt alternative will called when typesetting the test (or so)
+ {%\@@beginfontdef % \s!text goes wrong in testing because the 12pt alternative will be called when typesetting the test (or so)
\ifcsname\??ft#2#1\c!em\endcsname
% we test for em as we assume it to be set
\else
@@ -1389,7 +1447,7 @@
[\c!interlinespace,\c!em]%
\fi
\getparameters[\??ft#2#1][#3]%
- \@@endfontdef
+ %\@@endfontdef
% new code, see remark
\ifloadingfonts
% only runtime
@@ -1705,9 +1763,10 @@
\defineunknownfont{\csname\??ft#1#2\endcsname}%
\fi}
+\let\c!savedtext\c!text
+
\unexpanded\def\defineunknownfont#1%
- {\let\c!savedtext\c!text
- \let\c!text\s!text
+ {\let\c!text\s!text
\donefalse
\processcommacommand[\fontrelativesizelist]{\dodefineunknownfont{#1}}%
\let\c!text\c!savedtext
@@ -2644,7 +2703,7 @@
[liga=yes,kern=yes,compose=yes,tlig=yes,trep=yes]
\definefontfeature
- [arabic]
+ [arabic] % this will become obsolete
[mode=node,language=dflt,script=arab,ccmp=yes,
init=yes,medi=yes,fina=yes,isol=yes,
liga=yes,dlig=yes,rlig=yes,clig=yes,calt=yes,
@@ -2656,7 +2715,7 @@
\definefontfeature
[virtualmath]
- [mode=base,liga=yes,kern=yes,tlig=yes,trep=yes]
+ [mode=base,liga=yes,kern=yes,tlig=yes,trep=yes,language=dflt,script=math]
% for the moment here, this will change but we need it for mk.tex
diff --git a/tex/context/base/font-log.lua b/tex/context/base/font-log.lua
index 97cb4ff7c..2e7c53e43 100644
--- a/tex/context/base/font-log.lua
+++ b/tex/context/base/font-log.lua
@@ -10,6 +10,8 @@ local next, format, lower, concat = next, string.format, string.lower, table.con
local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+local report_define = logs.new("define fonts")
+
fonts.logger = fonts.logger or { }
--[[ldx--
@@ -23,7 +25,7 @@ function fonts.logger.save(tfmtable,source,specification) -- save file name in s
if tfmtable and specification and specification.specification then
local name = lower(specification.name)
if trace_defining and not fonts.used[name] then
- logs.report("define font","registering %s as %s (used: %s)",file.basename(specification.name),source,file.basename(specification.filename))
+ report_define("registering %s as %s (used: %s)",file.basename(specification.name),source,file.basename(specification.filename))
end
specification.source = source
fonts.loaded[lower(specification.specification)] = specification
@@ -51,7 +53,7 @@ end
statistics.register("loaded fonts", function()
if next(fonts.used) then
local t = fonts.logger.report()
- return (#t > 0 and format("%s files: %s",#t,concat(t,separator or " "))) or "none"
+ return (#t > 0 and format("%s files: %s",#t,concat(t," "))) or "none"
else
return nil
end
diff --git a/tex/context/base/font-map.lua b/tex/context/base/font-map.lua
index 299508764..faf38d7f6 100644
--- a/tex/context/base/font-map.lua
+++ b/tex/context/base/font-map.lua
@@ -14,6 +14,8 @@ local utfbyte = utf.byte
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
local trace_unimapping = false trackers.register("otf.unimapping", function(v) trace_unimapping = v end)
+local report_otf = logs.new("load otf")
+
local ctxcatcodes = tex and tex.ctxcatcodes
--[[ldx--
@@ -30,7 +32,7 @@ local function load_lum_table(filename) -- will move to font goodies
local lumfile = resolvers.find_file(lumname,"map") or ""
if lumfile ~= "" and lfs.isfile(lumfile) then
if trace_loading or trace_unimapping then
- logs.report("load otf","enhance: loading %s ",lumfile)
+ report_otf("enhance: loading %s ",lumfile)
end
lumunic = dofile(lumfile)
return lumunic, lumfile
@@ -255,14 +257,14 @@ fonts.map.add_to_unicode = function(data,filename)
for index, glyph in table.sortedhash(data.glyphs) do
local toun, name, unic = tounicode[index], glyph.name, glyph.unicode or -1 -- play safe
if toun then
- logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X, tounicode: %s",index,name,unic,toun)
+ report_otf("internal: 0x%05X, name: %s, unicode: 0x%05X, tounicode: %s",index,name,unic,toun)
else
- logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X",index,name,unic)
+ report_otf("internal: 0x%05X, name: %s, unicode: 0x%05X",index,name,unic)
end
end
end
if trace_loading and (ns > 0 or nl > 0) then
- logs.report("load otf","enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
+ report_otf("enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
end
end
diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua
index 80a56332a..7a2653856 100644
--- a/tex/context/base/font-mis.lua
+++ b/tex/context/base/font-mis.lua
@@ -11,7 +11,7 @@ local lower, strip = string.lower, string.strip
fonts.otf = fonts.otf or { }
-fonts.otf.version = fonts.otf.version or 2.650
+fonts.otf.version = fonts.otf.version or 2.653
fonts.otf.pack = true
fonts.otf.cache = containers.define("fonts", "otf", fonts.otf.version, true)
@@ -24,7 +24,7 @@ function fonts.otf.loadcached(filename,format,sub)
hash = hash .. "-" .. sub
end
hash = containers.cleanname(hash)
- local data = containers.read(fonts.otf.cache(), hash)
+ local data = containers.read(fonts.otf.cache, hash)
if data and not data.verbose then
fonts.otf.enhancers.unpack(data)
return data
diff --git a/tex/context/base/font-ota.lua b/tex/context/base/font-ota.lua
index 0b61e17d1..0e5b55542 100644
--- a/tex/context/base/font-ota.lua
+++ b/tex/context/base/font-ota.lua
@@ -51,6 +51,7 @@ local a_to_language = otf.a_to_language
-- somewhat slower; and .. we need a chain of them
function fonts.initializers.node.otf.analyze(tfmdata,value,attr)
+ local script, language
if attr and attr > 0 then
script, language = a_to_script[attr], a_to_language[attr]
else
diff --git a/tex/context/base/font-otb.lua b/tex/context/base/font-otb.lua
index a3d347737..65933b240 100644
--- a/tex/context/base/font-otb.lua
+++ b/tex/context/base/font-otb.lua
@@ -22,6 +22,8 @@ local trace_ligatures = false trackers.register("otf.ligatures", function
local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
+local report_prepare = logs.new("otf prepare")
+
local wildcard = "*"
local default = "dflt"
@@ -41,8 +43,20 @@ local function gref(descriptions,n)
local num, nam = { }, { }
for i=1,#n do
local ni = n[i]
- num[i] = format("U+%04X",ni)
- nam[i] = descriptions[ni].name or "?"
+ -- ! ! ! could be a helper ! ! !
+ if type(ni) == "table" then
+ local nnum, nnam = { }, { }
+ for j=1,#ni do
+ local nj = ni[j]
+ nnum[j] = format("U+%04X",nj)
+ nnam[j] = descriptions[nj].name or "?"
+ end
+ num[i] = concat(nnum,"|")
+ nam[i] = concat(nnam,"|")
+ else
+ num[i] = format("U+%04X",ni)
+ nam[i] = descriptions[ni].name or "?"
+ end
end
return format("%s (%s)",concat(num," "), concat(nam," "))
else
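
The widened gref helper above now also accepts entries that are themselves lists of code points (several unicodes behind one ligature component) and joins those with "|". A reduced, self-contained version of just the numeric formatting step, with the descriptions (name) lookup omitted:

local format, concat = string.format, table.concat

local function gref(n)
    local num = { }
    for i=1,#n do
        local ni = n[i]
        if type(ni) == "table" then     -- nested list of code points
            local nnum = { }
            for j=1,#ni do
                nnum[j] = format("U+%04X",ni[j])
            end
            num[i] = concat(nnum,"|")
        else                            -- plain code point
            num[i] = format("U+%04X",ni)
        end
    end
    return concat(num," ")
end

print(gref({ 0x66, { 0x66, 0x69 }, 0x69 })) -- U+0066 U+0066|U+0069 U+0069
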
@@ -76,7 +90,7 @@ local function resolve_ligatures(tfmdata,ligatures,kind)
local c, f, s = characters[uc], ligs[1], ligs[2]
local uft, ust = unicodes[f] or 0, unicodes[s] or 0
if not uft or not ust then
- logs.report("define otf","%s: unicode problem with base ligature %s = %s + %s",cref(kind),gref(descriptions,uc),gref(descriptions,uft),gref(descriptions,ust))
+ report_prepare("%s: unicode problem with base ligature %s = %s + %s",cref(kind),gref(descriptions,uc),gref(descriptions,uft),gref(descriptions,ust))
-- some kind of error
else
if type(uft) == "number" then uft = { uft } end
@@ -87,7 +101,7 @@ local function resolve_ligatures(tfmdata,ligatures,kind)
local us = ust[usi]
if changed[uf] or changed[us] then
if trace_baseinit and trace_ligatures then
- logs.report("define otf","%s: base ligature %s + %s ignored",cref(kind),gref(descriptions,uf),gref(descriptions,us))
+ report_prepare("%s: base ligature %s + %s ignored",cref(kind),gref(descriptions,uf),gref(descriptions,us))
end
else
local first, second = characters[uf], us
@@ -103,7 +117,7 @@ local function resolve_ligatures(tfmdata,ligatures,kind)
t[second] = { type = 0, char = uc[1] } -- can this still happen?
end
if trace_baseinit and trace_ligatures then
- logs.report("define otf","%s: base ligature %s + %s => %s",cref(kind),gref(descriptions,uf),gref(descriptions,us),gref(descriptions,uc))
+ report_prepare("%s: base ligature %s + %s => %s",cref(kind),gref(descriptions,uf),gref(descriptions,us),gref(descriptions,uc))
end
end
end
@@ -136,7 +150,7 @@ end
local splitter = lpeg.splitat(" ")
-function prepare_base_substitutions(tfmdata,kind,value) -- we can share some code with the node features
+local function prepare_base_substitutions(tfmdata,kind,value) -- we can share some code with the node features
if value then
local otfdata = tfmdata.shared.otfdata
local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
@@ -159,7 +173,7 @@ function prepare_base_substitutions(tfmdata,kind,value) -- we can share some cod
end
if characters[upv] then
if trace_baseinit and trace_singles then
- logs.report("define otf","%s: base substitution %s => %s",cref(kind,lookup),gref(descriptions,k),gref(descriptions,upv))
+ report_prepare("%s: base substitution %s => %s",cref(kind,lookup),gref(descriptions,k),gref(descriptions,upv))
end
changed[k] = upv
end
@@ -187,7 +201,7 @@ function prepare_base_substitutions(tfmdata,kind,value) -- we can share some cod
end
if characters[upc] then
if trace_baseinit and trace_alternatives then
- logs.report("define otf","%s: base alternate %s %s => %s",cref(kind,lookup),tostring(value),gref(descriptions,k),gref(descriptions,upc))
+ report_prepare("%s: base alternate %s %s => %s",cref(kind,lookup),tostring(value),gref(descriptions,k),gref(descriptions,upc))
end
changed[k] = upc
end
@@ -202,7 +216,7 @@ function prepare_base_substitutions(tfmdata,kind,value) -- we can share some cod
local upc = { lpegmatch(splitter,pc) }
for i=1,#upc do upc[i] = unicodes[upc[i]] end
-- we assume that it's no table
- logs.report("define otf","%s: base ligature %s => %s",cref(kind,lookup),gref(descriptions,upc),gref(descriptions,k))
+ report_prepare("%s: base ligature %s => %s",cref(kind,lookup),gref(descriptions,upc),gref(descriptions,k))
end
ligatures[#ligatures+1] = { pc, k }
end
@@ -278,7 +292,7 @@ local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns
if v ~= 0 and not t[k] then -- maybe no 0 test here
t[k], done = v, true
if trace_baseinit and trace_kerns then
- logs.report("define otf","%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v)
+ report_prepare("%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v)
end
end
end
@@ -364,10 +378,10 @@ function fonts.initializers.base.otf.features(tfmdata,value)
-- verbose name as long as we don't use <()<>[]{}/%> and the length
-- is < 128.
tfmdata.fullname = tfmdata.fullname .. "-" .. base -- tfmdata.psname is the original
- --~ logs.report("otf define","fullname base hash: '%s', featureset '%s'",tfmdata.fullname,hash)
+ --~ report_prepare("fullname base hash: '%s', featureset '%s'",tfmdata.fullname,hash)
end
if trace_preparing then
- logs.report("otf define","preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
+ report_prepare("preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
end
end
end
diff --git a/tex/context/base/font-otc.lua b/tex/context/base/font-otc.lua
index 357d347b1..65688e27f 100644
--- a/tex/context/base/font-otc.lua
+++ b/tex/context/base/font-otc.lua
@@ -13,6 +13,8 @@ local type, next = type, next
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local report_otf = logs.new("load otf")
+
local otf = fonts.otf
local tfm = fonts.tfm
@@ -186,7 +188,7 @@ fonts.otf.enhancers["enrich with features"] = function(data,filename)
end
if done > 0 then
if trace_loading then
- logs.report("load otf","enhance: registering %s feature (%s glyphs affected)",kind,done)
+ report_otf("enhance: registering %s feature (%s glyphs affected)",kind,done)
end
end
end
diff --git a/tex/context/base/font-otd.lua b/tex/context/base/font-otd.lua
index 41e885331..1eee45aaa 100644
--- a/tex/context/base/font-otd.lua
+++ b/tex/context/base/font-otd.lua
@@ -6,7 +6,9 @@ if not modules then modules = { } end modules ['font-otd'] = {
license = "see context related readme files"
}
-local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
+local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
+
+local report_otf = logs.new("load otf")
fonts = fonts or { }
fonts.otf = fonts.otf or { }
@@ -24,7 +26,7 @@ local a_to_script = { } otf.a_to_script = a_to_script
local a_to_language = { } otf.a_to_language = a_to_language
function otf.set_dynamics(font,dynamics,attribute)
- features = context_setups[context_numbers[attribute]] -- can be moved to caller
+ local features = context_setups[context_numbers[attribute]] -- can be moved to caller
if features then
local script = features.script or 'dflt'
local language = features.language or 'dflt'
@@ -41,7 +43,7 @@ function otf.set_dynamics(font,dynamics,attribute)
local dsla = dsl[attribute]
if dsla then
-- if trace_dynamics then
- -- logs.report("otf define","using dynamics %s: attribute %s, script %s, language %s",context_numbers[attribute],attribute,script,language)
+ -- report_otf("using dynamics %s: attribute %s, script %s, language %s",context_numbers[attribute],attribute,script,language)
-- end
return dsla
else
@@ -60,9 +62,10 @@ function otf.set_dynamics(font,dynamics,attribute)
tfmdata.script = script
tfmdata.shared.features = { }
-- end of save
- dsla = otf.set_features(tfmdata,fonts.define.check(features,otf.features.default))
+ local set = fonts.define.check(features,otf.features.default)
+ dsla = otf.set_features(tfmdata,set)
if trace_dynamics then
- logs.report("otf define","setting dynamics %s: attribute %s, script %s, language %s",context_numbers[attribute],attribute,script,language)
+ report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",context_numbers[attribute],attribute,script,language,table.sequenced(set))
end
-- we need to restore some values
tfmdata.script = saved.script
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index 9cecf21f0..5749d8fd7 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -8,10 +8,11 @@ if not modules then modules = { } end modules ['font-otf'] = {
local utf = unicode.utf8
-local concat, getn, utfbyte = table.concat, table.getn, utf.byte
+local concat, utfbyte = table.concat, utf.byte
local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
local type, next, tonumber, tostring = type, next, tonumber, tostring
local abs = math.abs
+local getn = table.getn
local lpegmatch = lpeg.match
local trace_private = false trackers.register("otf.private", function(v) trace_private = v end)
@@ -22,6 +23,8 @@ local trace_sequences = false trackers.register("otf.sequences", function(v
local trace_math = false trackers.register("otf.math", function(v) trace_math = v end)
local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+local report_otf = logs.new("load otf")
+
--~ trackers.enable("otf.loading")
--[[ldx--
@@ -81,7 +84,7 @@ otf.features.default = otf.features.default or { }
otf.enhancers = otf.enhancers or { }
otf.glists = { "gsub", "gpos" }
-otf.version = 2.650 -- beware: also sync font-mis.lua
+otf.version = 2.653 -- beware: also sync font-mis.lua
otf.pack = true -- beware: also sync font-mis.lua
otf.syncspace = true
otf.notdef = false
@@ -182,7 +185,7 @@ local function load_featurefile(ff,featurefile)
featurefile = resolvers.find_file(file.addsuffix(featurefile,'fea'),'fea')
if featurefile and featurefile ~= "" then
if trace_loading then
- logs.report("load otf", "featurefile: %s", featurefile)
+ report_otf("featurefile: %s", featurefile)
end
fontloader.apply_featurefile(ff, featurefile)
end
@@ -193,7 +196,7 @@ function otf.enhance(name,data,filename,verbose)
local enhancer = otf.enhancers[name]
if enhancer then
if (verbose ~= nil and verbose) or trace_loading then
- logs.report("load otf","enhance: %s (%s)",name,filename)
+ report_otf("enhance: %s (%s)",name,filename)
end
enhancer(data,filename)
end
@@ -221,6 +224,8 @@ local enhancers = {
function otf.load(filename,format,sub,featurefile)
local name = file.basename(file.removesuffix(filename))
+ local attr = lfs.attributes(filename)
+ local size, time = attr.size or 0, attr.modification or 0
if featurefile then
name = name .. "@" .. file.removesuffix(file.basename(featurefile))
end
@@ -230,10 +235,9 @@ function otf.load(filename,format,sub,featurefile)
hash = hash .. "-" .. sub
end
hash = containers.cleanname(hash)
- local data = containers.read(otf.cache(), hash)
- local size = lfs.attributes(filename,"size") or 0
- if not data or data.verbose ~= fonts.verbose or data.size ~= size then
- logs.report("load otf","loading: %s (hash: %s)",filename,hash)
+ local data = containers.read(otf.cache,hash)
+ if not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time then
+ report_otf("loading: %s (hash: %s)",filename,hash)
local ff, messages
if sub then
ff, messages = fontloader.open(filename,sub)
@@ -242,22 +246,22 @@ function otf.load(filename,format,sub,featurefile)
end
if trace_loading and messages and #messages > 0 then
if type(messages) == "string" then
- logs.report("load otf","warning: %s",messages)
+ report_otf("warning: %s",messages)
else
for m=1,#messages do
- logs.report("load otf","warning: %s",tostring(messages[m]))
+ report_otf("warning: %s",tostring(messages[m]))
end
end
else
- logs.report("load otf","font loaded okay")
+ report_otf("font loaded okay")
end
if ff then
load_featurefile(ff,featurefile)
data = fontloader.to_table(ff)
fontloader.close(ff)
if data then
- logs.report("load otf","file size: %s", size)
- logs.report("load otf","enhancing ...")
+ report_otf("file size: %s", size)
+ report_otf("enhancing ...")
for e=1,#enhancers do
otf.enhance(enhancers[e],data,filename)
io.flush() -- we want instant messages
@@ -266,22 +270,23 @@ function otf.load(filename,format,sub,featurefile)
otf.enhance("pack",data,filename)
end
data.size = size
+ data.time = time
data.verbose = fonts.verbose
- logs.report("load otf","saving in cache: %s",filename)
- data = containers.write(otf.cache(), hash, data)
+ report_otf("saving in cache: %s",filename)
+ data = containers.write(otf.cache, hash, data)
collectgarbage("collect")
- data = containers.read(otf.cache(), hash) -- this frees the old table and load the sparse one
+ data = containers.read(otf.cache, hash) -- this frees the old table and loads the sparse one
collectgarbage("collect")
else
- logs.report("load otf","loading failed (table conversion error)")
+ report_otf("loading failed (table conversion error)")
end
else
- logs.report("load otf","loading failed (file read error)")
+ report_otf("loading failed (file read error)")
end
end
if data then
if trace_defining then
- logs.report("define font","loading from cache: %s",hash)
+ report_otf("loading from cache: %s",hash)
end
otf.enhance("unpack",data,filename,false) -- no message here
otf.add_dimensions(data)
@@ -332,8 +337,8 @@ function otf.show_feature_order(otfdata,filename)
local sequences = otfdata.luatex.sequences
if sequences and #sequences > 0 then
if trace_loading then
- logs.report("otf check","font %s has %s sequences",filename,#sequences)
- logs.report("otf check"," ")
+ report_otf("font %s has %s sequences",filename,#sequences)
+ report_otf(" ")
end
for nos=1,#sequences do
local sequence = sequences[nos]
@@ -342,7 +347,7 @@ function otf.show_feature_order(otfdata,filename)
local subtables = sequence.subtables or { "no-subtables" }
local features = sequence.features
if trace_loading then
- logs.report("otf check","%3i %-15s %-20s [%s]",nos,name,typ,concat(subtables,","))
+ report_otf("%3i %-15s %-20s [%s]",nos,name,typ,concat(subtables,","))
end
if features then
for feature, scripts in next, features do
@@ -355,16 +360,16 @@ function otf.show_feature_order(otfdata,filename)
tt[#tt+1] = format("[%s: %s]",script,concat(ttt," "))
end
if trace_loading then
- logs.report("otf check"," %s: %s",feature,concat(tt," "))
+ report_otf(" %s: %s",feature,concat(tt," "))
end
end
end
end
if trace_loading then
- logs.report("otf check","\n")
+ report_otf("\n")
end
elseif trace_loading then
- logs.report("otf check","font %s has no sequences",filename)
+ report_otf("font %s has no sequences",filename)
end
end
@@ -579,7 +584,7 @@ otf.enhancers["merge cid fonts"] = function(data,filename)
-- save us some more memory (at the cost of harder tracing)
if data.subfonts then
if data.glyphs and next(data.glyphs) then
- logs.report("load otf","replacing existing glyph table due to subfonts")
+ report_otf("replacing existing glyph table due to subfonts")
end
local cidinfo = data.cidinfo
local verbose = fonts.verbose
@@ -613,17 +618,17 @@ otf.enhancers["merge cid fonts"] = function(data,filename)
subfont.glyphs = nil
end
if trace_loading then
- logs.report("load otf","cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
+ report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
end
data.glyphs = glyphs
data.map = data.map or { }
data.map.map = uni_to_int
data.map.backmap = int_to_uni
elseif trace_loading then
- logs.report("load otf","unable to remap cid font, missing cid file for %s",filename)
+ report_otf("unable to remap cid font, missing cid file for %s",filename)
end
elseif trace_loading then
- logs.report("load otf","font %s has no glyphs",filename)
+ report_otf("font %s has no glyphs",filename)
end
end
end
@@ -635,11 +640,11 @@ otf.enhancers["prepare unicode"] = function(data,filename)
local glyphs = data.glyphs
local mapmap = data.map
if not mapmap then
- logs.report("load otf","no map in %s",filename)
+ report_otf("no map in %s",filename)
mapmap = { }
data.map = { map = mapmap }
elseif not mapmap.map then
- logs.report("load otf","no unicode map in %s",filename)
+ report_otf("no unicode map in %s",filename)
mapmap = { }
data.map.map = mapmap
else
@@ -658,7 +663,7 @@ otf.enhancers["prepare unicode"] = function(data,filename)
unicodes[name] = private
internals[index] = true
if trace_private then
- logs.report("load otf","enhance: glyph %s at index U+%04X is moved to private unicode slot U+%04X",name,index,private)
+ report_otf("enhance: glyph %s at index U+%04X is moved to private unicode slot U+%04X",name,index,private)
end
private = private + 1
else
@@ -701,9 +706,9 @@ otf.enhancers["prepare unicode"] = function(data,filename)
end
if trace_loading then
if #multiples > 0 then
- logs.report("load otf","%s glyph are reused: %s",#multiples, concat(multiples," "))
+ report_otf("%s glyph are reused: %s",#multiples, concat(multiples," "))
else
- logs.report("load otf","no glyph are reused")
+ report_otf("no glyph are reused")
end
end
luatex.indices = indices
@@ -789,14 +794,12 @@ otf.enhancers["check math"] = function(data,filename)
if hv then
math.horiz_variants = hv.variants
local p = hv.parts
- if p then
- if #p>0 then
- for i=1,#p do
- local pi = p[i]
- pi.glyph = unicodes[pi.component] or 0
- end
- math.horiz_parts = p
+ if p and #p > 0 then
+ for i=1,#p do
+ local pi = p[i]
+ pi.glyph = unicodes[pi.component] or 0
end
+ math.horiz_parts = p
end
local ic = hv.italic_correction
if ic and ic ~= 0 then
@@ -808,14 +811,12 @@ otf.enhancers["check math"] = function(data,filename)
local uc = unicodes[index]
math.vert_variants = vv.variants
local p = vv.parts
- if p then
- if #p>0 then
- for i=1,#p do
- local pi = p[i]
- pi.glyph = unicodes[pi.component] or 0
- end
- math.vert_parts = p
+ if p and #p > 0 then
+ for i=1,#p do
+ local pi = p[i]
+ pi.glyph = unicodes[pi.component] or 0
end
+ math.vert_parts = p
end
local ic = vv.italic_correction
if ic and ic ~= 0 then
@@ -851,7 +852,7 @@ otf.enhancers["share widths"] = function(data,filename)
end
if most > 1000 then
if trace_loading then
- logs.report("load otf", "most common width: %s (%s times), sharing (cjk font)",wd,most)
+ report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
end
for k, v in next, glyphs do
if v.width == wd then
@@ -864,10 +865,15 @@ end
-- kern: ttf has a table with kerns
+-- Weird, as maxfirst and maxseconds can have holes; first seems to be indexed, but
+-- seconds can start at 2. This needs to be fixed, as getn as well as # are sort of
+-- unpredictable; alternatively we could force a [1] if not set (maybe I will do that
+-- anyway).
+
--~ otf.enhancers["reorganize kerns"] = function(data,filename)
--~ local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes
--~ local mkdone = false
---~ for index, glyph in next, data.glyphs do
+--~ for index, glyph in next, glyphs do
--~ if glyph.kerns then
--~ local mykerns = { }
--~ for k,v in next, glyph.kerns do
@@ -876,7 +882,7 @@ end
--~ local uvc = unicodes[vc]
--~ if not uvc then
--~ if trace_loading then
---~ logs.report("load otf","problems with unicode %s of kern %s at glyph %s",vc,k,index)
+--~ report_otf("problems with unicode %s of kern %s at glyph %s",vc,k,index)
--~ end
--~ else
--~ if type(vl) ~= "table" then
@@ -906,16 +912,19 @@ end
--~ end
--~ end
--~ if trace_loading and mkdone then
---~ logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables")
+--~ report_otf("replacing 'kerns' tables by 'mykerns' tables")
--~ end
--~ if data.kerns then
--~ if trace_loading then
---~ logs.report("load otf", "removing global 'kern' table")
+--~ report_otf("removing global 'kern' table")
--~ end
--~ data.kerns = nil
--~ end
--~ local dgpos = data.gpos
--~ if dgpos then
+--~ local separator = lpeg.P(" ")
+--~ local other = ((1 - separator)^0) / unicodes
+--~ local splitter = lpeg.Ct(other * (separator * other)^0)
--~ for gp=1,#dgpos do
--~ local gpos = dgpos[gp]
--~ local subtables = gpos.subtables
@@ -924,56 +933,70 @@ end
--~ local subtable = subtables[s]
--~ local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
--~ if kernclass then -- the next one is quite slow
+--~ local split = { } -- saves time
--~ for k=1,#kernclass do
--~ local kcl = kernclass[k]
--~ local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular
--~ if type(lookups) ~= "table" then
--~ lookups = { lookups }
--~ end
+--~ local maxfirsts, maxseconds = getn(firsts), getn(seconds)
+--~ for _, s in next, firsts do
+--~ split[s] = split[s] or lpegmatch(splitter,s)
+--~ end
+--~ for _, s in next, seconds do
+--~ split[s] = split[s] or lpegmatch(splitter,s)
+--~ end
--~ for l=1,#lookups do
--~ local lookup = lookups[l]
---~ -- weird, as maxfirst and maxseconds can have holes
---~ local maxfirsts, maxseconds = getn(firsts), getn(seconds)
---~ if trace_loading then
---~ logs.report("load otf", "adding kernclass %s with %s times %s pairs",lookup, maxfirsts, maxseconds)
---~ end
---~ for fk, fv in next, firsts do
---~ for first in gmatch(fv,"[^ ]+") do
---~ local first_unicode = unicodes[first]
---~ if type(first_unicode) == "number" then
---~ first_unicode = { first_unicode }
+--~ local function do_it(fk,first_unicode)
+--~ local glyph = glyphs[mapmap[first_unicode]]
+--~ if glyph then
+--~ local mykerns = glyph.mykerns
+--~ if not mykerns then
+--~ mykerns = { } -- unicode indexed !
+--~ glyph.mykerns = mykerns
--~ end
---~ for f=1,#first_unicode do
---~ local glyph = glyphs[mapmap[first_unicode[f]]]
---~ if glyph then
---~ local mykerns = glyph.mykerns
---~ if not mykerns then
---~ mykerns = { } -- unicode indexed !
---~ glyph.mykerns = mykerns
---~ end
---~ local lookupkerns = mykerns[lookup]
---~ if not lookupkerns then
---~ lookupkerns = { }
---~ mykerns[lookup] = lookupkerns
---~ end
---~ for sk, sv in next, seconds do
---~ local offset = offsets[(fk-1) * maxseconds + sk]
---~ --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
---~ for second in gmatch(sv,"[^ ]+") do
---~ local second_unicode = unicodes[second]
---~ if type(second_unicode) == "number" then
+--~ local lookupkerns = mykerns[lookup]
+--~ if not lookupkerns then
+--~ lookupkerns = { }
+--~ mykerns[lookup] = lookupkerns
+--~ end
+--~ local baseoffset = (fk-1) * maxseconds
+--~ for sk=2,maxseconds do -- we can avoid this loop with a table
+--~ local sv = seconds[sk]
+--~ local splt = split[sv]
+--~ if splt then
+--~ local offset = offsets[baseoffset + sk]
+--~ --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
+--~ if offset then
+--~ for i=1,#splt do
+--~ local second_unicode = splt[i]
+--~ if tonumber(second_unicode) then
--~ lookupkerns[second_unicode] = offset
---~ else
---~ for s=1,#second_unicode do
---~ lookupkerns[second_unicode[s]] = offset
---~ end
---~ end
+--~ else for s=1,#second_unicode do
+--~ lookupkerns[second_unicode[s]] = offset
+--~ end end
--~ end
--~ end
---~ elseif trace_loading then
---~ logs.report("load otf", "no glyph data for U+%04X", first_unicode[f])
--~ end
--~ end
+--~ elseif trace_loading then
+--~ report_otf("no glyph data for U+%04X", first_unicode)
+--~ end
+--~ end
+--~ for fk=1,#firsts do
+--~ local fv = firsts[fk]
+--~ local splt = split[fv]
+--~ if splt then
+--~ for i=1,#splt do
+--~ local first_unicode = splt[i]
+--~ if tonumber(first_unicode) then
+--~ do_it(fk,first_unicode)
+--~ else for f=1,#first_unicode do
+--~ do_it(fk,first_unicode[f])
+--~ end end
+--~ end
--~ end
--~ end
--~ end
@@ -990,7 +1013,27 @@ end
otf.enhancers["reorganize kerns"] = function(data,filename)
local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes
local mkdone = false
- for index, glyph in next, data.glyphs do
+ local function do_it(lookup,first_unicode,kerns)
+ local glyph = glyphs[mapmap[first_unicode]]
+ if glyph then
+ local mykerns = glyph.mykerns
+ if not mykerns then
+ mykerns = { } -- unicode indexed !
+ glyph.mykerns = mykerns
+ end
+ local lookupkerns = mykerns[lookup]
+ if not lookupkerns then
+ lookupkerns = { }
+ mykerns[lookup] = lookupkerns
+ end
+ for second_unicode, kern in next, kerns do
+ lookupkerns[second_unicode] = kern
+ end
+ elseif trace_loading then
+ report_otf("no glyph data for U+%04X", first_unicode)
+ end
+ end
+ for index, glyph in next, glyphs do
if glyph.kerns then
local mykerns = { }
for k,v in next, glyph.kerns do
@@ -999,7 +1042,7 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
local uvc = unicodes[vc]
if not uvc then
if trace_loading then
- logs.report("load otf","problems with unicode %s of kern %s at glyph %s",vc,k,index)
+ report_otf("problems with unicode %s of kern %s at glyph %s",vc,k,index)
end
else
if type(vl) ~= "table" then
@@ -1029,11 +1072,11 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
end
end
if trace_loading and mkdone then
- logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables")
+ report_otf("replacing 'kerns' tables by 'mykerns' tables")
end
if data.kerns then
if trace_loading then
- logs.report("load otf", "removing global 'kern' table")
+ report_otf("removing global 'kern' table")
end
data.kerns = nil
end
@@ -1050,75 +1093,53 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
local subtable = subtables[s]
local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
if kernclass then -- the next one is quite slow
+ local split = { } -- saves time
for k=1,#kernclass do
local kcl = kernclass[k]
local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular
if type(lookups) ~= "table" then
lookups = { lookups }
end
- local split = { }
+ local maxfirsts, maxseconds = getn(firsts), getn(seconds)
+ -- here we could convert split into a list of unicodes which is a bit
+ -- faster but as this is only done when caching it does not save us much
+ for _, s in next, firsts do
+ split[s] = split[s] or lpegmatch(splitter,s)
+ end
+ for _, s in next, seconds do
+ split[s] = split[s] or lpegmatch(splitter,s)
+ end
for l=1,#lookups do
local lookup = lookups[l]
- -- weird, as maxfirst and maxseconds can have holes, first seems to be indexed, seconds starts at 2
- local maxfirsts, maxseconds = getn(firsts), getn(seconds)
- for _, s in next, firsts do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- for _, s in next, seconds do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- if trace_loading then
- logs.report("load otf", "adding kernclass %s with %s times %s pairs",lookup, maxfirsts, maxseconds)
- end
- local function do_it(fk,first_unicode)
- local glyph = glyphs[mapmap[first_unicode]]
- if glyph then
- local mykerns = glyph.mykerns
- if not mykerns then
- mykerns = { } -- unicode indexed !
- glyph.mykerns = mykerns
- end
- local lookupkerns = mykerns[lookup]
- if not lookupkerns then
- lookupkerns = { }
- mykerns[lookup] = lookupkerns
- end
- local baseoffset = (fk-1) * maxseconds
+ for fk=1,#firsts do
+ local fv = firsts[fk]
+ local splt = split[fv]
+ if splt then
+ local kerns, baseoffset = { }, (fk-1) * maxseconds
for sk=2,maxseconds do
local sv = seconds[sk]
- local offset = offsets[baseoffset + sk]
- --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
local splt = split[sv]
if splt then
- for i=1,#splt do
- local second_unicode = splt[i]
- if tonumber(second_unicode) then
- lookupkerns[second_unicode] = offset
- else
- for s=1,#second_unicode do
- lookupkerns[second_unicode[s]] = offset
- end
+ local offset = offsets[baseoffset + sk]
+ if offset then
+ for i=1,#splt do
+ local second_unicode = splt[i]
+ if tonumber(second_unicode) then
+ kerns[second_unicode] = offset
+ else for s=1,#second_unicode do
+ kerns[second_unicode[s]] = offset
+ end end
end
end
end
end
- elseif trace_loading then
- logs.report("load otf", "no glyph data for U+%04X", first_unicode)
- end
- end
- for fk=1,#firsts do
- local fv = firsts[fk]
- local splt = split[fv]
- if splt then
for i=1,#splt do
local first_unicode = splt[i]
if tonumber(first_unicode) then
- do_it(fk,first_unicode)
- else
- for f=1,#first_unicode do
- do_it(fk,first_unicode[f])
- end
- end
+ do_it(lookup,first_unicode,kerns)
+ else for f=1,#first_unicode do
+ do_it(lookup,first_unicode[f],kerns)
+ end end
end
end
end
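
The rewritten kern-class code above splits each space separated class string once, using an lpeg pattern that maps every token through the font's unicodes table (the pattern itself is visible in the commented-out variant: lpeg.Ct(other * (separator * other)^0)). A self-contained sketch of that splitter, with a stand-in unicodes table in place of the real name-to-unicode map:

    local lpeg = require("lpeg")
    local lpegmatch = lpeg.match

    -- stand-in for the font's name -> unicode table
    local unicodes = { A = 0x41, B = 0x42, ["A.alt"] = 0xE041 }

    local separator = lpeg.P(" ")
    local other     = ((1 - separator)^0) / unicodes   -- map each token through the table
    local splitter  = lpeg.Ct(other * (separator * other)^0)

    local split = { }                                   -- per-subtable memo, as above
    local s = "A B A.alt"
    split[s] = split[s] or lpegmatch(splitter,s)        -- { 0x41, 0x42, 0xE041 }
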
@@ -1133,6 +1154,14 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
end
end
+
+
+
+
+
+
+
+
otf.enhancers["strip not needed data"] = function(data,filename)
local verbose = fonts.verbose
local int_to_uni = data.luatex.unicodes
@@ -1203,7 +1232,7 @@ otf.enhancers["check math parameters"] = function(data,filename)
local pmp = private_math_parameters[m]
if not mathdata[pmp] then
if trace_loading then
- logs.report("load otf", "setting math parameter '%s' to 0", pmp)
+ report_otf("setting math parameter '%s' to 0", pmp)
end
mathdata[pmp] = 0
end
@@ -1248,11 +1277,11 @@ otf.enhancers["flatten glyph lookups"] = function(data,filename)
end
else
if trace_loading then
- logs.report("load otf", "flattening needed, report to context list")
+ report_otf("flattening needed, report to context list")
end
for a, b in next, s do
if trace_loading and vvv[a] then
- logs.report("load otf", "flattening conflict, report to context list")
+ report_otf("flattening conflict, report to context list")
end
vvv[a] = b
end
@@ -1314,7 +1343,7 @@ otf.enhancers["flatten feature tables"] = function(data,filename)
for _, tag in next, otf.glists do
if data[tag] then
if trace_loading then
- logs.report("load otf", "flattening %s table", tag)
+ report_otf("flattening %s table", tag)
end
for k, v in next, data[tag] do
local features = v.features
@@ -1383,7 +1412,7 @@ function otf.set_features(tfmdata,features)
if value and fiotf[f] then -- brr
if not done[f] then -- so, we can move some to triggers
if trace_features then
- logs.report("define otf","initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown', tfmdata.fullname or 'unknown')
+ report_otf("initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown', tfmdata.fullname or 'unknown')
end
fiotf[f](tfmdata,value) -- can set mode (no need to pass otf)
mode = tfmdata.mode or fonts.mode -- keep this, mode can be set local !
@@ -1410,7 +1439,7 @@ function otf.set_features(tfmdata,features)
local f = list[i]
if fmotf[f] then -- brr
if trace_features then
- logs.report("define otf","installing feature handler %s for mode %s for font %s",f,mode or 'unknown', tfmdata.fullname or 'unknown')
+ report_otf("installing feature handler %s for mode %s for font %s",f,mode or 'unknown', tfmdata.fullname or 'unknown')
end
processes[#processes+1] = fmotf[f]
end
@@ -1432,7 +1461,7 @@ function otf.otf_to_tfm(specification)
local format = specification.format
local features = specification.features.normal
local cache_id = specification.hash
- local tfmdata = containers.read(tfm.cache(),cache_id)
+ local tfmdata = containers.read(tfm.cache,cache_id)
--~ print(cache_id)
if not tfmdata then
local otfdata = otf.load(filename,format,sub,features and features.featurefile)
@@ -1466,7 +1495,7 @@ function otf.otf_to_tfm(specification)
shared.processes, shared.features = otf.set_features(tfmdata,fonts.define.check(features,otf.features.default))
end
end
- containers.write(tfm.cache(),cache_id,tfmdata)
+ containers.write(tfm.cache,cache_id,tfmdata)
end
return tfmdata
end
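
Several hunks (otf.load above, otf_to_tfm here, and names.is_permitted/write_data/read_data further down) drop the call parentheses on otf.cache, tfm.cache and names.cache: the caches are now container objects rather than functions returning one, matching the names.cache = containers.define(...) line in font-syn.lua below. A toy illustration of such an API, purely for orientation and not the real containers module:

    -- toy stand-in: a container is a table created once by define(),
    -- and read()/write() take that table directly
    local containers = { }

    function containers.define(category,subcategory,version,enabled)
        return { category = category, subcategory = subcategory,
                 version = version, enabled = enabled, storage = { } }
    end

    function containers.read(container,name)
        return container.enabled and container.storage[name] or nil
    end

    function containers.write(container,name,data)
        if container.enabled then
            container.storage[name] = data
        end
        return data
    end

    local cache = containers.define("fonts","otf",1.0,true)   -- arbitrary version
    containers.write(cache,"somehash",{ size = 1234 })
    local data = containers.read(cache,"somehash")
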
@@ -1512,7 +1541,7 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
if designsize == 0 then
designsize = 100
end
- local spaceunits = 500
+ local spaceunits, spacer = 500, "space"
-- indices maps from unicodes to indices
for u, i in next, indices do
characters[u] = { } -- we need this because for instance we add protruding info and loop over characters
@@ -1531,9 +1560,8 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
-- we have them shared because that packs nicer
-- we could prepare the variants and keep 'm in descriptions
if m then
- local variants = m.horiz_variants
+ local variants, parts, c = m.horiz_variants, m.horiz_parts, char
if variants then
- local c = char
for n in gmatch(variants,"[^ ]+") do
local un = unicodes[n]
if un and u ~= un then
@@ -1541,21 +1569,26 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
c = characters[un]
end
end
- c.horiz_variants = m.horiz_parts
- else
- local variants = m.vert_variants
- if variants then
- local c = char
- for n in gmatch(variants,"[^ ]+") do
- local un = unicodes[n]
- if un and u ~= un then
- c.next = un
- c = characters[un]
- end
+ c.horiz_variants = parts
+ elseif parts then
+ c.horiz_variants = parts
+ end
+ local variants, parts, c = m.vert_variants, m.vert_parts, char
+ if variants then
+ for n in gmatch(variants,"[^ ]+") do
+ local un = unicodes[n]
+ if un and u ~= un then
+ c.next = un
+ c = characters[un]
end
- c.vert_variants = m.vert_parts
- c.vert_italic_correction = m.vert_italic_correction
- end
+ end -- c is now last in chain
+ c.vert_variants = parts
+ elseif parts then
+ c.vert_variants = parts
+ end
+ local italic_correction = m.vert_italic_correction
+ if italic_correction then
+ c.vert_italic_correction = italic_correction
end
local kerns = m.kerns
if kerns then
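
The reorganized math-variant code above walks the space separated variants string, linking each glyph to the next larger one via the next field, and finally attaches the extensible parts to the last character in the chain. A compact sketch of that chaining, with toy characters/unicodes tables standing in for the real tfm data:

    local characters = { [0x28] = { }, [0xE100] = { }, [0xE101] = { } }
    local unicodes   = { ["parenleft"] = 0x28, ["parenleft.v1"] = 0xE100, ["parenleft.v2"] = 0xE101 }

    local function chain_variants(u,variants,parts)
        local c = characters[u]
        for n in string.gmatch(variants,"[^ ]+") do
            local un = unicodes[n]
            if un and u ~= un then
                c.next = un          -- each size points to the next larger variant
                c = characters[un]
            end
        end
        -- c is now the last glyph in the chain; the extensible parts live there
        c.vert_variants = parts
    end

    chain_variants(0x28,"parenleft.v1 parenleft.v2",{ })
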
@@ -1684,7 +1717,7 @@ function tfm.read_from_open_type(specification)
if p then
local ps = p * specification.textsize / 100
if trace_math then
- logs.report("define font","asked script size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
+ report_otf("asked script size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
end
s = ps
end
@@ -1693,7 +1726,7 @@ function tfm.read_from_open_type(specification)
if p then
local ps = p * specification.textsize / 100
if trace_math then
- logs.report("define font","asked scriptscript size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
+ report_otf("asked scriptscript size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
end
s = ps
end
@@ -1708,7 +1741,7 @@ function tfm.read_from_open_type(specification)
if specname then
tfmtable.name = specname
if trace_defining then
- logs.report("define font","overloaded fontname: '%s'",specname)
+ report_otf("overloaded fontname: '%s'",specname)
end
end
end
diff --git a/tex/context/base/font-otn.lua b/tex/context/base/font-otn.lua
index d4f89adc6..6dcadddd8 100644
--- a/tex/context/base/font-otn.lua
+++ b/tex/context/base/font-otn.lua
@@ -145,6 +145,12 @@ local trace_steps = false trackers.register("otf.steps", function
local trace_skips = false trackers.register("otf.skips", function(v) trace_skips = v end)
local trace_directions = false trackers.register("otf.directions", function(v) trace_directions = v end)
+local report_direct = logs.new("otf direct")
+local report_subchain = logs.new("otf subchain")
+local report_chain = logs.new("otf chain")
+local report_process = logs.new("otf process")
+local report_prepare = logs.new("otf prepare")
+
trackers.register("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
trackers.register("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
@@ -242,10 +248,10 @@ local function logprocess(...)
if trace_steps then
registermessage(...)
end
- logs.report("otf direct",...)
+ report_direct(...)
end
local function logwarning(...)
- logs.report("otf direct",...)
+ report_direct(...)
end
local function gref(n)
@@ -822,7 +828,7 @@ local krn = kerns[nextchar]
end
end
else
- logs.report("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
local a, b = krn[3], krn[7]
if a and a ~= 0 then
local k = set_kern(snext,factor,rlmode,a)
@@ -861,12 +867,11 @@ local function logprocess(...)
if trace_steps then
registermessage(...)
end
- logs.report("otf subchain",...)
-end
-local function logwarning(...)
- logs.report("otf subchain",...)
+ report_subchain(...)
end
+local logwarning = report_subchain
+
-- ['coverage']={
-- ['after']={ "r" },
-- ['before']={ "q" },
@@ -904,12 +909,11 @@ local function logprocess(...)
if trace_steps then
registermessage(...)
end
- logs.report("otf chain",...)
-end
-local function logwarning(...)
- logs.report("otf chain",...)
+ report_chain(...)
end
+local logwarning = report_chain
+
-- We could share functions but that would lead to extra function calls with many
-- arguments, redundant tests and confusing messages.
@@ -1058,7 +1062,7 @@ end
<p>Here we replace start by new glyph. First we delete the rest of the match.</p>
--ldx]]--
-function chainprocs.gsub_alternate(start,stop,kind,lookupname,currentcontext,cache,currentlookup)
+function chainprocs.gsub_alternate(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname)
-- todo: marks ?
delete_till_stop(start,stop)
local current = start
@@ -1155,7 +1159,7 @@ function chainprocs.gsub_ligature(start,stop,kind,chainname,currentcontext,cache
logprocess("%s: replacing character %s upto %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
end
end
- start = toligature(kind,lookup,start,stop,l2,currentlookup.flags[1],discfound)
+ start = toligature(kind,lookupname,start,stop,l2,currentlookup.flags[1],discfound)
return start, true, nofreplacements
elseif trace_bugs then
if start == stop then
@@ -1490,7 +1494,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur
end
end
else
- logs.report("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
local a, b = krn[3], krn[7]
if a and a ~= 0 then
local k = set_kern(snext,factor,rlmode,a)
@@ -1864,12 +1868,11 @@ local function logprocess(...)
if trace_steps then
registermessage(...)
end
- logs.report("otf process",...)
-end
-local function logwarning(...)
- logs.report("otf process",...)
+ report_process(...)
end
+local logwarning = report_process
+
local function report_missing_cache(typ,lookup)
local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
local t = f[typ] if not t then t = { } f[typ] = t end
@@ -1882,6 +1885,9 @@ end
local resolved = { } -- we only resolve a font,script,language pair once
-- todo: pass all these 'locals' in a table
+--
+-- dynamics will be isolated some day; for the moment we handle the case where
+-- attribute zero is not set
function fonts.methods.node.otf.features(head,font,attr)
if trace_steps then
@@ -1926,8 +1932,7 @@ function fonts.methods.node.otf.features(head,font,attr)
local ra = rl [attr] if ra == nil then ra = { } rl [attr] = ra end -- attr can be false
-- sequences always > 1 so no need for optimization
for s=1,#sequences do
- local pardir, txtdir = 0, { }
- local success = false
+ local pardir, txtdir, success = 0, { }, false
local sequence = sequences[s]
local r = ra[s] -- cache
if r == nil then
@@ -1965,7 +1970,7 @@ function fonts.methods.node.otf.features(head,font,attr)
end
if trace_applied then
local typ, action = match(sequence.type,"(.*)_(.*)")
- logs.report("otf node mode",
+ report_process(
"%s font: %03i, dynamic: %03i, kind: %s, lookup: %3i, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
(valid and "+") or "-",font,attr or 0,kind,s,script,language,what,typ,action,sequence.name)
end
@@ -1994,24 +1999,33 @@ function fonts.methods.node.otf.features(head,font,attr)
while start do
local id = start.id
if id == glyph then
- if start.subtype<256 and start.font == font and (not attr or has_attribute(start,0,attr)) then
---~ if start.subtype<256 and start.font == font and has_attribute(start,0,attr) then
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = thecache[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- start, success = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,i)
- if success then
- break
+ if start.subtype<256 and start.font == font then
+ local a = has_attribute(start,0)
+ if a then
+ a = a == attr
+ else
+ a = true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname = subtables[i]
+ local lookupcache = thecache[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ start, success = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,i)
+ if success then
+ break
+ end
end
+ else
+ report_missing_cache(typ,lookupname)
end
- else
- report_missing_cache(typ,lookupname)
end
+ if start then start = start.prev end
+ else
+ start = start.prev
end
- if start then start = start.prev end
else
start = start.prev
end
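
The processing loops above now share the same attribute test: read dynamics attribute 0 once, and only require it to equal attr when it is actually set (this matches the comment about attribute zero not being set). A hypothetical helper mirroring that inlined test, assuming has_attribute is LuaTeX's node.has_attribute:

    local has_attribute = node.has_attribute   -- LuaTeX node library

    local function dynamics_match(start,attr)
        local a = has_attribute(start,0)       -- attribute 0 carries the dynamic feature set
        if a then
            return a == attr                   -- set: it must equal the requested dynamics
        else
            return true                        -- not set: treat the glyph as matching
        end
    end

The later loops combine this with the optional state attribute check ((a == attr) and (not attribute or has_attribute(start,state,attribute))), but the attribute-zero part is the same.
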
@@ -2034,18 +2048,27 @@ function fonts.methods.node.otf.features(head,font,attr)
while start do
local id = start.id
if id == glyph then
---~ if start.font == font and start.subtype<256 and has_attribute(start,0,attr) and (not attribute or has_attribute(start,state,attribute)) then
- if start.font == font and start.subtype<256 and (not attr or has_attribute(start,0,attr)) and (not attribute or has_attribute(start,state,attribute)) then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- sequence kan weg
- local ok
- start, ok = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,1)
- if ok then
- success = true
+ if start.subtype<256 and start.font == font then
+ local a = has_attribute(start,0)
+ if a then
+ a = (a == attr) and (not attribute or has_attribute(start,state,attribute))
+ else
+ a = not attribute or has_attribute(start,state,attribute)
+ end
+ if a then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- sequence can be removed
+ local ok
+ start, ok = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,1)
+ if ok then
+ success = true
+ end
end
+ if start then start = start.next end
+ else
+ start = start.next
end
- if start then start = start.next end
else
start = start.next
end
@@ -2082,7 +2105,7 @@ function fonts.methods.node.otf.features(head,font,attr)
rlmode = pardir
end
if trace_directions then
- logs.report("fonts","directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
+ report_process("directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
elseif subtype == 6 then
local dir = start.dir
@@ -2096,7 +2119,7 @@ function fonts.methods.node.otf.features(head,font,attr)
rlmode = pardir
--~ txtdir = { }
if trace_directions then
- logs.report("fonts","directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
+ report_process("directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
end
start = start.next
@@ -2109,27 +2132,36 @@ function fonts.methods.node.otf.features(head,font,attr)
while start do
local id = start.id
if id == glyph then
- if start.subtype<256 and start.font == font and (not attr or has_attribute(start,0,attr)) and (not attribute or has_attribute(start,state,attribute)) then
---~ if start.subtype<256 and start.font == font and has_attribute(start,0,attr) and (not attribute or has_attribute(start,state,attribute)) then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = thecache[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- start, ok = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,i)
- if ok then
- success = true
- break
+ if start.subtype<256 and start.font == font then
+ local a = has_attribute(start,0)
+ if a then
+ a = (a == attr) and (not attribute or has_attribute(start,state,attribute))
+ else
+ a = not attribute or has_attribute(start,state,attribute)
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = thecache[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ start, ok = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,i)
+ if ok then
+ success = true
+ break
+ end
end
+ else
+ report_missing_cache(typ,lookupname)
end
- else
- report_missing_cache(typ,lookupname)
end
+ if start then start = start.next end
+ else
+ start = start.next
end
- if start then start = start.next end
else
start = start.next
end
@@ -2150,7 +2182,6 @@ function fonts.methods.node.otf.features(head,font,attr)
-- end
elseif id == whatsit then
local subtype = start.subtype
- local subtype = start.subtype
if subtype == 7 then
local dir = start.dir
if dir == "+TRT" or dir == "+TLT" then
@@ -2167,7 +2198,7 @@ function fonts.methods.node.otf.features(head,font,attr)
rlmode = pardir
end
if trace_directions then
- logs.report("fonts","directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
+ report_process("directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
elseif subtype == 6 then
local dir = start.dir
@@ -2181,7 +2212,7 @@ function fonts.methods.node.otf.features(head,font,attr)
rlmode = pardir
--~ txtdir = { }
if trace_directions then
- logs.report("fonts","directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
+ report_process("directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
end
start = start.next
@@ -2280,7 +2311,7 @@ local function prepare_lookups(tfmdata)
-- as well (no big deal)
--
local action = {
- substitution = function(p,lookup,k,glyph,unicode)
+ substitution = function(p,lookup,glyph,unicode)
local old, new = unicode, unicodes[p[2]]
if type(new) == "table" then
new = new[1]
@@ -2289,10 +2320,10 @@ local function prepare_lookups(tfmdata)
if not s then s = { } single[lookup] = s end
s[old] = new
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: substitution %s => %s",lookup,old,new)
+ --~ report_prepare("lookup %s: substitution %s => %s",lookup,old,new)
--~ end
end,
- multiple = function (p,lookup,k,glyph,unicode)
+ multiple = function (p,lookup,glyph,unicode)
local old, new = unicode, { }
local m = multiple[lookup]
if not m then m = { } multiple[lookup] = m end
@@ -2306,10 +2337,10 @@ local function prepare_lookups(tfmdata)
end
end
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: multiple %s => %s",lookup,old,concat(new," "))
+ --~ report_prepare("lookup %s: multiple %s => %s",lookup,old,concat(new," "))
--~ end
end,
- alternate = function(p,lookup,k,glyph,unicode)
+ alternate = function(p,lookup,glyph,unicode)
local old, new = unicode, { }
local a = alternate[lookup]
if not a then a = { } alternate[lookup] = a end
@@ -2323,12 +2354,12 @@ local function prepare_lookups(tfmdata)
end
end
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: alternate %s => %s",lookup,old,concat(new,"|"))
+ --~ report_prepare("lookup %s: alternate %s => %s",lookup,old,concat(new,"|"))
--~ end
end,
- ligature = function (p,lookup,k,glyph,unicode)
+ ligature = function (p,lookup,glyph,unicode)
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: ligature %s => %s",lookup,p[2],glyph.name)
+ --~ report_prepare("lookup %s: ligature %s => %s",lookup,p[2],glyph.name)
--~ end
local first = true
local t = ligature[lookup]
@@ -2337,7 +2368,7 @@ local function prepare_lookups(tfmdata)
if first then
local u = unicodes[s]
if not u then
- logs.report("define otf","lookup %s: ligature %s => %s ignored due to invalid unicode",lookup,p[2],glyph.name)
+ report_prepare("lookup %s: ligature %s => %s ignored due to invalid unicode",lookup,p[2],glyph.name)
break
elseif type(u) == "number" then
if not t[u] then
@@ -2374,13 +2405,13 @@ local function prepare_lookups(tfmdata)
end
t[2] = unicode
end,
- position = function(p,lookup,k,glyph,unicode)
+ position = function(p,lookup,glyph,unicode)
-- not used
local s = position[lookup]
if not s then s = { } position[lookup] = s end
s[unicode] = p[2] -- direct pointer to kern spec
end,
- pair = function(p,lookup,k,glyph,unicode)
+ pair = function(p,lookup,glyph,unicode)
local s = pair[lookup]
if not s then s = { } pair[lookup] = s end
local others = s[unicode]
@@ -2407,7 +2438,7 @@ local function prepare_lookups(tfmdata)
end
end
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: pair for U+%04X",lookup,unicode)
+ --~ report_prepare("lookup %s: pair for U+%04X",lookup,unicode)
--~ end
end,
}
@@ -2416,7 +2447,7 @@ local function prepare_lookups(tfmdata)
local lookups = glyph.slookups
if lookups then
for lookup, p in next, lookups do
- action[p[1]](p,lookup,k,glyph,unicode)
+ action[p[1]](p,lookup,glyph,unicode)
end
end
local lookups = glyph.mlookups
@@ -2424,7 +2455,7 @@ local function prepare_lookups(tfmdata)
for lookup, whatever in next, lookups do
for i=1,#whatever do -- normally one
local p = whatever[i]
- action[p[1]](p,lookup,k,glyph,unicode)
+ action[p[1]](p,lookup,glyph,unicode)
end
end
end
@@ -2435,7 +2466,7 @@ local function prepare_lookups(tfmdata)
if not k then k = { } kerns[lookup] = k end
k[unicode] = krn -- ref to glyph, saves lookup
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: kern for U+%04X",lookup,unicode)
+ --~ report_prepare("lookup %s: kern for U+%04X",lookup,unicode)
--~ end
end
end
@@ -2451,7 +2482,7 @@ local function prepare_lookups(tfmdata)
if not f then f = { } mark[lookup] = f end
f[unicode] = anchors -- ref to glyph, saves lookup
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: mark anchor %s for U+%04X",lookup,name,unicode)
+ --~ report_prepare("lookup %s: mark anchor %s for U+%04X",lookup,name,unicode)
--~ end
end
end
@@ -2465,7 +2496,7 @@ local function prepare_lookups(tfmdata)
if not f then f = { } cursive[lookup] = f end
f[unicode] = anchors -- ref to glyph, saves lookup
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: exit anchor %s for U+%04X",lookup,name,unicode)
+ --~ report_prepare("lookup %s: exit anchor %s for U+%04X",lookup,name,unicode)
--~ end
end
end
@@ -2479,7 +2510,7 @@ end
-- local cache = { }
luatex = luatex or {} -- this has to change ... we need a better one
-function prepare_contextchains(tfmdata)
+local function prepare_contextchains(tfmdata)
local otfdata = tfmdata.shared.otfdata
local lookups = otfdata.lookups
if lookups then
@@ -2498,7 +2529,7 @@ function prepare_contextchains(tfmdata)
for lookupname, lookupdata in next, otfdata.lookups do
local lookuptype = lookupdata.type
if not lookuptype then
- logs.report("otf process","missing lookuptype for %s",lookupname)
+ report_prepare("missing lookuptype for %s",lookupname)
else
local rules = lookupdata.rules
if rules then
@@ -2506,7 +2537,7 @@ function prepare_contextchains(tfmdata)
-- contextchain[lookupname][unicode]
if fmt == "coverage" then
if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- logs.report("otf process","unsupported coverage %s for %s",lookuptype,lookupname)
+ report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
else
local contexts = contextchain[lookupname]
if not contexts then
@@ -2542,7 +2573,7 @@ function prepare_contextchains(tfmdata)
end
elseif fmt == "reversecoverage" then
if lookuptype ~= "reversesub" then
- logs.report("otf process","unsupported reverse coverage %s for %s",lookuptype,lookupname)
+ report_prepare("unsupported reverse coverage %s for %s",lookuptype,lookupname)
else
local contexts = reversecontextchain[lookupname]
if not contexts then
@@ -2582,7 +2613,7 @@ function prepare_contextchains(tfmdata)
end
elseif fmt == "glyphs" then
if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- logs.report("otf process","unsupported coverage %s for %s",lookuptype,lookupname)
+ report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
else
local contexts = contextchain[lookupname]
if not contexts then
@@ -2653,7 +2684,7 @@ function fonts.initializers.node.otf.features(tfmdata,value)
prepare_lookups(tfmdata)
otfdata.shared.initialized = true
if trace_preparing then
- logs.report("otf process","preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
+ report_prepare("preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
end
end
end
diff --git a/tex/context/base/font-otp.lua b/tex/context/base/font-otp.lua
index a80c515ad..f01468bf2 100644
--- a/tex/context/base/font-otp.lua
+++ b/tex/context/base/font-otp.lua
@@ -13,6 +13,8 @@ local sort, concat = table.sort, table.concat
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local report_otf = logs.new("load otf")
+
fonts = fonts or { }
fonts.otf = fonts.otf or { }
fonts.otf.enhancers = fonts.otf.enhancers or { }
@@ -70,7 +72,7 @@ function fonts.otf.enhancers.pack(data)
local function success(stage,pass)
if #t == 0 then
if trace_loading then
- logs.report("load otf","pack quality: nothing to pack")
+ report_otf("pack quality: nothing to pack")
end
return false
elseif #t >= threshold then
@@ -98,12 +100,12 @@ function fonts.otf.enhancers.pack(data)
data.tables = tt
end
if trace_loading then
- logs.report("load otf","pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)", stage, pass, one+two+rest, one, two, rest, criterium)
+ report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)", stage, pass, one+two+rest, one, two, rest, criterium)
end
return true
else
if trace_loading then
- logs.report("load otf","pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)", stage, pass, #t, threshold)
+ report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)", stage, pass, #t, threshold)
end
return false
end
diff --git a/tex/context/base/font-ott.lua b/tex/context/base/font-ott.lua
index 2be1bf06c..c56e98498 100644
--- a/tex/context/base/font-ott.lua
+++ b/tex/context/base/font-ott.lua
@@ -696,7 +696,6 @@ function otf.meanings.normalize(features)
k = lower(k)
if k == "language" or k == "lang" then
v = gsub(lower(v),"[^a-z0-9%-]","")
- k = language
if not languages[v] then
h.language = to_languages[v] or "dflt"
else
diff --git a/tex/context/base/font-pat.lua b/tex/context/base/font-pat.lua
index 6aba4d47e..b6531abb9 100644
--- a/tex/context/base/font-pat.lua
+++ b/tex/context/base/font-pat.lua
@@ -10,6 +10,8 @@ local match, lower, find = string.match, string.lower, string.find
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local report_otf = logs.new("load otf")
+
-- this will become a per font patch file
--
-- older versions of latin modern didn't have the designsize set
@@ -22,7 +24,7 @@ local function patch(data,filename)
local ds = match(file.basename(lower(filename)),"(%d+)")
if ds then
if trace_loading then
- logs.report("load otf","patching design size (%s)",ds)
+ report_otf("patching design size (%s)",ds)
end
data.design_size = tonumber(ds) * 10
end
@@ -32,7 +34,7 @@ local function patch(data,filename)
-- beware, this is a hack, features for latin often don't apply to greek
-- but lm has not much features anyway (and only greek for math)
if trace_loading then
- logs.report("load otf","adding 13 greek capitals")
+ report_otf("adding 13 greek capitals")
end
uni_to_ind[0x391] = uni_to_ind[0x41]
uni_to_ind[0x392] = uni_to_ind[0x42]
@@ -75,7 +77,7 @@ local function patch(data,filename)
local v = gpos[k]
if not v.features and v.type == "gpos_mark2mark" then
if trace_loading then
- logs.report("load otf","patching mkmk feature (name: %s)", v.name or "?")
+ report_otf("patching mkmk feature (name: %s)", v.name or "?")
end
v.features = {
{
@@ -101,7 +103,7 @@ local function patch_domh(data,filename,threshold)
local d = m.DisplayOperatorMinHeight or 0
if d < threshold then
if trace_loading then
- logs.report("load otf","patching DisplayOperatorMinHeight(%s -> %s)",d,threshold)
+ report_otf("patching DisplayOperatorMinHeight(%s -> %s)",d,threshold)
end
m.DisplayOperatorMinHeight = threshold
end
@@ -113,7 +115,7 @@ local function patch_domh(data,filename,threshold)
local width, italic = g.width or 0, g.italic_correction or 0
local newwidth = width - italic
if trace_loading then
- logs.report("load otf","patching width of %s: %s (width) - %s (italic) = %s",name,width,italic,newwidth)
+ report_otf("patching width of %s: %s (width) - %s (italic) = %s",name,width,italic,newwidth)
end
g.width = newwidth
end
diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua
index 5ad92e002..b698fe27f 100644
--- a/tex/context/base/font-syn.lua
+++ b/tex/context/base/font-syn.lua
@@ -20,6 +20,8 @@ local unpack = unpack or table.unpack
local trace_names = false trackers.register("fonts.names", function(v) trace_names = v end)
local trace_warnings = false trackers.register("fonts.warnings", function(v) trace_warnings = v end)
+local report_names = logs.new("fontnames")
+
--[[ldx--
<p>This module implements a name to filename resolver. Names are resolved
using a table that has keys filtered from the font related files.</p>
@@ -44,7 +46,7 @@ names.saved = false
names.loaded = false
names.be_clever = true
names.enabled = true
-names.autoreload = toboolean(os.env['MTX.FONTS.AUTOLOAD'] or os.env['MTX_FONTS_AUTOLOAD'] or "no")
+names.autoreload = toboolean(os.getenv('MTX.FONTS.AUTOLOAD') or os.getenv('MTX_FONTS_AUTOLOAD') or "no")
names.cache = containers.define("fonts","data",names.version,true)
--[[ldx--
@@ -123,13 +125,13 @@ function names.splitspec(askedname)
width = width and lpegmatch(widths, width) or width
variant = variant and lpegmatch(variants,variant) or variant
if trace_names then
- logs.report("fonts","requested name '%s' split in name '%s', weight '%s', style '%s', width '%s' and variant '%s'",
+ report_names("requested name '%s' split in name '%s', weight '%s', style '%s', width '%s' and variant '%s'",
askedname,name or '',weight or '',style or '',width or '',variant or '')
end
if not weight or not weight or not width or not variant then
weight, style, width, variant = weight or "normal", style or "normal", width or "normal", variant or "normal"
if trace_names then
- logs.report("fonts","request '%s' normalized to '%s-%s-%s-%s-%s'",
+ report_names("request '%s' normalized to '%s-%s-%s-%s-%s'",
askedname,name,weight,style,width,variant)
end
end
@@ -218,11 +220,12 @@ filters.names = { }
function names.getpaths(trace)
local hash, result = { }, { }
- local function collect(t)
+ local function collect(t,where)
for i=1, #t do
local v = resolvers.clean_path(t[i])
- v = gsub(v,"/+$","")
+ v = gsub(v,"/+$","") -- not needed any more
local key = lower(v)
+ report_names("adding path from %s: %s",where,v)
if not hash[key] then
hash[key], result[#result+1] = true, v
end
@@ -230,13 +233,16 @@ function names.getpaths(trace)
end
local path = names.environment_path_variable or ""
if path ~= "" then
- collect(resolvers.expanded_path_list(path))
+ collect(resolvers.expanded_path_list(path),path)
end
if xml then
- local confname = names.xml_configuration_file or ""
+ local confname = resolvers.getenv("FONTCONFIG_FILE") or ""
+ if confname == "" then
+ confname = names.xml_configuration_file or ""
+ end
if confname ~= "" then
-- first look in the tex tree
- local name = resolvers.find_file(confname,"other")
+ local name = resolvers.find_file(confname,"fontconfig files") or ""
if name == "" then
-- after all, fontconfig is a unix thing
name = file.join("/etc",confname)
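
The hunk above changes how the fontconfig configuration file is located: an explicit FONTCONFIG_FILE environment variable wins, otherwise the configured default name is used, and the file is then searched in the tex tree before falling back to /etc. A sketch of that lookup order with plain io/os stand-ins; the real code uses resolvers.getenv, resolvers.find_file and file.join:

    local function find_fontconfig(default_confname)
        local confname = os.getenv("FONTCONFIG_FILE") or ""   -- explicit override wins
        if confname == "" then
            confname = default_confname or ""                  -- e.g. "fonts.conf"
        end
        if confname == "" then
            return nil
        end
        -- first the tex tree (resolvers.find_file in the real code); here we only
        -- check the current directory and the unix default location
        for _, name in ipairs { confname, "/etc/" .. confname } do
            local f = io.open(name,"r")
            if f then
                f:close()
                return name
            end
        end
    end
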
@@ -246,7 +252,7 @@ function names.getpaths(trace)
end
if name ~= "" and lfs.isfile(name) then
if trace_names then
- logs.report("fontnames","loading fontconfig file: %s",name)
+ report_names("loading fontconfig file: %s",name)
end
local xmldata = xml.load(name)
-- begin of untested mess
@@ -259,19 +265,19 @@ function names.getpaths(trace)
end
if lfs.isfile(incname) then
if trace_names then
- logs.report("fontnames","merging included fontconfig file: %s",incname)
+ report_names("merging included fontconfig file: %s",incname)
end
return io.loaddata(incname)
elseif trace_names then
- logs.report("fontnames","ignoring included fontconfig file: %s",incname)
+ report_names("ignoring included fontconfig file: %s",incname)
end
end)
-- end of untested mess
local fontdirs = xml.collect_texts(xmldata,"dir",true)
if trace_names then
- logs.report("fontnames","%s dirs found in fontconfig",#fontdirs)
+ report_names("%s dirs found in fontconfig",#fontdirs)
end
- collect(fontdirs)
+ collect(fontdirs,"fontconfig file")
end
end
end
@@ -308,7 +314,7 @@ local function walk_tree(pathlist,suffix,identify)
path = resolvers.clean_path(path .. "/")
path = gsub(path,"/+","/")
local pattern = path .. "**." .. suffix -- ** forces recurse
- logs.report("fontnames", "globbing path %s",pattern)
+ report_names( "globbing path %s",pattern)
local t = dir.glob(pattern)
sort(t,sorter)
for j=1,#t do
@@ -529,12 +535,12 @@ local function checkduplicate(where) -- fails on "Romantik" but that's a border
local nv = #v
if nv > 1 then
if trace_warnings then
- logs.report("fontnames", "double lookup: %s => %s",k,concat(v," | "))
+ report_names( "double lookup: %s => %s",k,concat(v," | "))
end
n = n + nv
end
end
- logs.report("fontnames", "%s double lookups in %s",n,where)
+ report_names( "%s double lookups in %s",n,where)
end
local function checkduplicates()
@@ -590,26 +596,43 @@ end
local function analysefiles()
local data = names.data
- local done, totalnofread, totalnofskipped = { }, 0, 0
+ local done, totalnofread, totalnofskipped, totalnofduplicates, nofread, nofskipped, nofduplicates = { }, 0, 0, 0, 0, 0, 0
local skip_paths, skip_names = filters.paths, filters.names
local function identify(completename,name,suffix,storedname)
local basename = file.basename(completename)
local basepath = file.dirname(completename)
+ nofread = nofread + 1
if done[name] then
-- already done (avoid otf afm clash)
+ if trace_names then
+ report_names("%s font %s already done",suffix,completename)
+ logs.push()
+ end
+ nofduplicates = nofduplicates + 1
+ nofskipped = nofskipped + 1
elseif not io.exists(completename) then
-- weird error
+ if trace_names then
+ report_names("%s font %s does not really exist",suffix,completename)
+ logs.push()
+ end
+ nofskipped = nofskipped + 1
elseif not file.is_qualified_path(completename) and resolvers.find_file(completename,suffix) == "" then
-- not locatable by backend anyway
+ if trace_names then
+ report_names("%s font %s cannot be found by backend",suffix,completename)
+ logs.push()
+ end
+ nofskipped = nofskipped + 1
else
- nofread = nofread + 1
if #skip_paths > 0 then
for i=1,#skip_paths do
if find(basepath,skip_paths[i]) then
if trace_names then
- logs.report("fontnames","rejecting path of %s font %s",suffix,completename)
+ report_names("rejecting path of %s font %s",suffix,completename)
logs.push()
end
+ nofskipped = nofskipped + 1
return
end
end
@@ -619,15 +642,16 @@ local function analysefiles()
if find(basename,skip_names[i]) then
done[name] = true
if trace_names then
- logs.report("fontnames","rejecting name of %s font %s",suffix,completename)
+ report_names("rejecting name of %s font %s",suffix,completename)
logs.push()
end
+ nofskipped = nofskipped + 1
return
end
end
end
if trace_names then
- logs.report("fontnames","identifying %s font %s",suffix,completename)
+ report_names("identifying %s font %s",suffix,completename)
logs.push()
end
local result, message = filters[lower(suffix)](completename)
@@ -635,24 +659,25 @@ local function analysefiles()
logs.pop()
end
if result then
- if not result[1] then
- local ok = check_name(data,result,storedname,suffix)
- if not ok then
- nofskipped = nofskipped + 1
- end
- else
+ if result[1] then
for r=1,#result do
local ok = check_name(data,result[r],storedname,suffix,r-1) -- subfonts start at zero
- if not ok then
- nofskipped = nofskipped + 1
- end
+ -- if not ok then
+ -- nofskipped = nofskipped + 1
+ -- end
end
+ else
+ local ok = check_name(data,result,storedname,suffix)
+ -- if not ok then
+ -- nofskipped = nofskipped + 1
+ -- end
end
if trace_warnings and message and message ~= "" then
- logs.report("fontnames","warning when identifying %s font %s: %s",suffix,completename,message)
+ report_names("warning when identifying %s font %s: %s",suffix,completename,message)
end
elseif trace_warnings then
- logs.report("fontnames","error when identifying %s font %s: %s",suffix,completename,message or "unknown")
+ nofskipped = nofskipped + 1
+ report_names("error when identifying %s font %s: %s",suffix,completename,message or "unknown")
end
done[name] = true
end
@@ -662,27 +687,32 @@ local function analysefiles()
for n=1,#list do
local suffix = list[n]
local t = os.gettimeofday() -- use elapser
- nofread, nofskipped = 0, 0
+ nofread, nofskipped, nofduplicates = 0, 0, 0
suffix = lower(suffix)
- logs.report("fontnames", "identifying %s font files with suffix %s",what,suffix)
+ report_names( "identifying %s font files with suffix %s",what,suffix)
method(suffix)
suffix = upper(suffix)
- logs.report("fontnames", "identifying %s font files with suffix %s",what,suffix)
+ report_names( "identifying %s font files with suffix %s",what,suffix)
method(suffix)
- totalnofread, totalnofskipped = totalnofread + nofread, totalnofskipped + nofskipped
+ totalnofread, totalnofskipped, totalnofduplicates = totalnofread + nofread, totalnofskipped + nofskipped, totalnofduplicates + nofduplicates
local elapsed = os.gettimeofday() - t
- logs.report("fontnames", "%s %s files identified, %s hash entries added, runtime %0.3f seconds",nofread,what,nofread-nofskipped,elapsed)
+ report_names( "%s %s files identified, %s skipped, %s duplicates, %s hash entries added, runtime %0.3f seconds",nofread,what,nofskipped,nofduplicates,nofread-nofskipped,elapsed)
end
end
if not trace_warnings then
- logs.report("fontnames", "warnings are disabled (tracker 'fonts.warnings')")
+ report_names( "warnings are disabled (tracker 'fonts.warnings')")
end
traverse("tree", function(suffix) -- TEXTREE only
resolvers.with_files(".*%." .. suffix .. "$", function(method,root,path,name)
- if method == "file" then
+ if method == "file" or method == "tree" then
local completename = root .."/" .. path .. "/" .. name
- identify(completename,name,suffix,name,name)
+ identify(completename,name,suffix,name)
+ return true
end
+ end, function(blobtype,blobpath,pattern)
+ report_names( "scanning %s for %s files",blobpath,suffix)
+ end, function(blobtype,blobpath,pattern,total,checked,done)
+ report_names( "%s entries found, %s %s files checked, %s okay",total,checked,suffix,done)
end)
end)
if texconfig.kpse_init then
@@ -697,7 +727,7 @@ local function analysefiles()
walk_tree(names.getpaths(trace),suffix,identify)
end)
end
- data.statistics.readfiles, data.statistics.skippedfiles = totalnofread, totalnofskipped
+ data.statistics.readfiles, data.statistics.skippedfiles, data.statistics.duplicatefiles = totalnofread, totalnofskipped, totalnofduplicates
end
local function rejectclashes() -- just to be sure, so no explicit afm will be found then
@@ -709,7 +739,7 @@ local function rejectclashes() -- just to be sure, so no explicit afm will be fo
local fnd, fnm = used[f], s.filename
if fnd then
if trace_warnings then
- logs.report("fontnames", "fontname '%s' clashes, rejecting '%s' in favor of '%s'",f,fnm,fnd)
+ report_names( "fontname '%s' clashes, rejecting '%s' in favor of '%s'",f,fnm,fnd)
end
else
used[f], okay[#okay+1] = fnm, s
@@ -720,7 +750,7 @@ local function rejectclashes() -- just to be sure, so no explicit afm will be fo
end
local d = #specifications - #okay
if d > 0 then
- logs.report("fontnames", "%s files rejected due to clashes",d)
+ report_names( "%s files rejected due to clashes",d)
end
names.data.specifications = okay
end
@@ -754,13 +784,13 @@ function names.identify()
end
function names.is_permitted(name)
- return containers.is_usable(names.cache(), name)
+ return containers.is_usable(names.cache, name)
end
function names.write_data(name,data)
- containers.write(names.cache(),name,data)
+ containers.write(names.cache,name,data)
end
function names.read_data(name)
- return containers.read(names.cache(),name)
+ return containers.read(names.cache,name)
end
function names.load(reload,verbose)
@@ -770,7 +800,7 @@ function names.load(reload,verbose)
names.identify(verbose)
names.write_data(names.basename,names.data)
else
- logs.report("font table", "unable to access database cache")
+ report_names("unable to access database cache")
end
names.saved = true
end
@@ -783,7 +813,7 @@ function names.load(reload,verbose)
names.saved = true
end
if not data then
- logs.report("font table", "accessing the data table failed")
+ report_names("accessing the data table failed")
else
unpackreferences()
sorthashes()
@@ -842,10 +872,10 @@ local function is_reloaded()
local c_status = table.serialize(resolvers.data_state())
local f_status = table.serialize(data.data_state)
if c_status == f_status then
- -- logs.report("fonts","font database matches configuration and file hashes")
+ -- report_names("font database matches configuration and file hashes")
return
else
- logs.report("fonts","font database does not match configuration and file hashes")
+ report_names("font database does not match configuration and file hashes")
end
end
names.loaded = false
@@ -886,7 +916,7 @@ local function foundname(name,sub) -- sub is not used currently
local found = mappings[l][name]
if found then
if trace_names then
- logs.report("fonts","resolved via direct name match: '%s'",name)
+ report_names("resolved via direct name match: '%s'",name)
end
return found
end
@@ -896,7 +926,7 @@ local function foundname(name,sub) -- sub is not used currently
local found, fname = fuzzy(mappings[l],sorted_mappings[l],name,sub)
if found then
if trace_names then
- logs.report("fonts","resolved via fuzzy name match: '%s' => '%s'",name,fname)
+ report_names("resolved via fuzzy name match: '%s' => '%s'",name,fname)
end
return found
end
@@ -906,7 +936,7 @@ local function foundname(name,sub) -- sub is not used currently
local found = fallbacks[l][name]
if found then
if trace_names then
- logs.report("fonts","resolved via direct fallback match: '%s'",name)
+ report_names("resolved via direct fallback match: '%s'",name)
end
return found
end
@@ -916,11 +946,14 @@ local function foundname(name,sub) -- sub is not used currently
local found, fname = fuzzy(sorted_mappings[l],sorted_fallbacks[l],name,sub)
if found then
if trace_names then
- logs.report("fonts","resolved via fuzzy fallback match: '%s' => '%s'",name,fname)
+ report_names("resolved via fuzzy fallback match: '%s' => '%s'",name,fname)
end
return found
end
end
+ if trace_names then
+ report_names("font with name '%s' cannot be found",name)
+ end
end
function names.resolvedspecification(askedname,sub)
@@ -1144,7 +1177,7 @@ local function collect(stage,found,done,name,weight,style,width,variant,all)
strictname = "^".. name -- to be checked
local family = families[name]
if trace_names then
- logs.report("fonts","resolving name '%s', weight '%s', style '%s', width '%s', variant '%s'",
+ report_names("resolving name '%s', weight '%s', style '%s', width '%s', variant '%s'",
name or "?",tostring(weight),tostring(style),tostring(width),tostring(variant))
end
--~ print(name,table.serialize(family))
@@ -1153,27 +1186,27 @@ local function collect(stage,found,done,name,weight,style,width,variant,all)
if width and width ~= "" then
if variant and variant ~= "" then
if trace_names then
- logs.report("fonts","resolving stage %s, name '%s', weight '%s', style '%s', width '%s', variant '%s'",stage,name,weight,style,width,variant)
+ report_names("resolving stage %s, name '%s', weight '%s', style '%s', width '%s', variant '%s'",stage,name,weight,style,width,variant)
end
s_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,family)
m_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,families,sorted,strictname)
else
if trace_names then
- logs.report("fonts","resolving stage %s, name '%s', weight '%s', style '%s', width '%s'",stage,name,weight,style,width)
+ report_names("resolving stage %s, name '%s', weight '%s', style '%s', width '%s'",stage,name,weight,style,width)
end
s_collect_weight_style_width(found,done,all,weight,style,width,family)
m_collect_weight_style_width(found,done,all,weight,style,width,families,sorted,strictname)
end
else
if trace_names then
- logs.report("fonts","resolving stage %s, name '%s', weight '%s', style '%s'",stage,name,weight,style)
+ report_names("resolving stage %s, name '%s', weight '%s', style '%s'",stage,name,weight,style)
end
s_collect_weight_style(found,done,all,weight,style,family)
m_collect_weight_style(found,done,all,weight,style,families,sorted,strictname)
end
else
if trace_names then
- logs.report("fonts","resolving stage %s, name '%s', weight '%s'",stage,name,weight)
+ report_names("resolving stage %s, name '%s', weight '%s'",stage,name,weight)
end
s_collect_weight(found,done,all,weight,family)
m_collect_weight(found,done,all,weight,families,sorted,strictname)
@@ -1181,33 +1214,33 @@ local function collect(stage,found,done,name,weight,style,width,variant,all)
elseif style and style ~= "" then
if width and width ~= "" then
if trace_names then
- logs.report("fonts","resolving stage %s, name '%s', style '%s', width '%s'",stage,name,style,width)
+ report_names("resolving stage %s, name '%s', style '%s', width '%s'",stage,name,style,width)
end
s_collect_style_width(found,done,all,style,width,family)
m_collect_style_width(found,done,all,style,width,families,sorted,strictname)
else
if trace_names then
- logs.report("fonts","resolving stage %s, name '%s', style '%s'",stage,name,style)
+ report_names("resolving stage %s, name '%s', style '%s'",stage,name,style)
end
s_collect_style(found,done,all,style,family)
m_collect_style(found,done,all,style,families,sorted,strictname)
end
elseif width and width ~= "" then
if trace_names then
- logs.report("fonts","resolving stage %s, name '%s', width '%s'",stage,name,width)
+ report_names("resolving stage %s, name '%s', width '%s'",stage,name,width)
end
s_collect_width(found,done,all,width,family)
m_collect_width(found,done,all,width,families,sorted,strictname)
else
if trace_names then
- logs.report("fonts","resolving stage %s, name '%s'",stage,name)
+ report_names("resolving stage %s, name '%s'",stage,name)
end
s_collect(found,done,all,family)
m_collect(found,done,all,families,sorted,strictname)
end
end
-function heuristic(name,weight,style,width,variant,all) -- todo: fallbacks
+local function heuristic(name,weight,style,width,variant,all) -- todo: fallbacks
local found, done = { }, { }
--~ print(name,weight,style,width,variant)
weight, style, width, variant = weight or "normal", style or "normal", width or "normal", variant or "normal"
@@ -1238,9 +1271,9 @@ function heuristic(name,weight,style,width,variant,all) -- todo: fallbacks
for i=1,nf do
t[#t+1] = format("'%s'",found[i].fontname)
end
- logs.report("fonts","name '%s' resolved to %s instances: %s",name,nf,concat(t," "))
+ report_names("name '%s' resolved to %s instances: %s",name,nf,concat(t," "))
else
- logs.report("fonts","name '%s' unresolved",name)
+ report_names("name '%s' unresolved",name)
end
end
if all then
@@ -1385,19 +1418,29 @@ function names.lookup(pattern,name,reload) -- todo: find
lookups = families[pattern]
end
if trace_names then
- logs.report("fonts","starting with %s lookups for '%s'",#lookups,pattern)
+ report_names("starting with %s lookups for '%s'",#lookups,pattern)
end
if lookups then
for key, value in gmatch(pattern,"([^=,]+)=([^=,]+)") do
local t = { }
- for i=1,#lookups do
- local s = lookups[i]
- if s[key] == value then
- t[#t+1] = lookups[i]
+ if find(value,"*") then
+ value = string.topattern(value)
+ for i=1,#lookups do
+ local s = lookups[i]
+ if find(s[key],value) then
+ t[#t+1] = lookups[i]
+ end
+ end
+ else
+ for i=1,#lookups do
+ local s = lookups[i]
+ if s[key] == value then
+ t[#t+1] = lookups[i]
+ end
end
end
if trace_names then
- logs.report("fonts","%s matches for key '%s' with value '%s'",#t,key,value)
+ report_names("%s matches for key '%s' with value '%s'",#t,key,value)
end
lookups = t
end
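
A minimal standalone sketch of the wildcard branch added to names.lookup above: when the requested value contains a '*', it is converted to a Lua pattern and tested with find instead of compared for equality. The topattern helper and the lookups records below are invented stand-ins (ConTeXt uses string.topattern and real font records), so treat this as an illustration only.

local find, gsub = string.find, string.gsub

local function topattern(str) -- hypothetical stand-in for string.topattern
    str = gsub(str,"([%.%-%+%[%]%(%)%$%^%%%?])","%%%1") -- escape magic characters
    return (gsub(str,"%*",".*"))                        -- '*' means "anything"
end

local lookups = { -- invented sample records
    { fontname = "lmroman10regular" },
    { fontname = "lmroman10bold"    },
    { fontname = "lmsans10regular"  },
}

local key, value = "fontname", "lmroman10*"
local t = { }
if find(value,"*",1,true) then
    value = topattern(value)
    for i=1,#lookups do
        if find(lookups[i][key],value) then t[#t+1] = lookups[i] end
    end
else
    for i=1,#lookups do
        if lookups[i][key] == value then t[#t+1] = lookups[i] end
    end
end
print(#t) -- 2: both lmroman10 records match the wildcard request
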
diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua
index 31ae2cae1..69cd2707e 100644
--- a/tex/context/base/font-tfm.lua
+++ b/tex/context/base/font-tfm.lua
@@ -14,6 +14,8 @@ local concat, sortedkeys, utfbyte, serialize = table.concat, table.sortedkeys, u
local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end)
+local report_define = logs.new("define fonts")
+
-- tfmdata has also fast access to indices and unicodes
-- to be checked: otf -> tfm -> tfmscaled
--
@@ -52,11 +54,13 @@ tfm.fontname_mode = "fullpath"
tfm.enhance = tfm.enhance or function() end
+fonts.formats.tfm = "type1" -- we need to have at least a value here
+
function tfm.read_from_tfm(specification)
local fname, tfmdata = specification.filename or "", nil
if fname ~= "" then
if trace_defining then
- logs.report("define font","loading tfm file %s at size %s",fname,specification.size)
+ report_define("loading tfm file %s at size %s",fname,specification.size)
end
tfmdata = font.read_tfm(fname,specification.size) -- not cached, fast enough
if tfmdata then
@@ -79,7 +83,7 @@ function tfm.read_from_tfm(specification)
tfm.enhance(tfmdata,specification)
end
elseif trace_defining then
- logs.report("define font","loading tfm with name %s fails",specification.name)
+ report_define("loading tfm with name %s fails",specification.name)
end
return tfmdata
end
@@ -247,12 +251,12 @@ function tfm.do_scale(tfmtable, scaledpoints, relativeid)
local nodemode = tfmtable.mode == "node"
local hasquality = tfmtable.auto_expand or tfmtable.auto_protrude
local hasitalic = tfmtable.has_italic
+ local descriptions = tfmtable.descriptions or { }
--
t.parameters = { }
t.characters = { }
t.MathConstants = { }
-- fast access
- local descriptions = tfmtable.descriptions or { }
t.unicodes = tfmtable.unicodes
t.indices = tfmtable.indices
t.marks = tfmtable.marks
@@ -355,7 +359,7 @@ t.colorscheme = tfmtable.colorscheme
end
end
-- if trace_scaling then
- -- logs.report("define font","t=%s, u=%s, i=%s, n=%s c=%s",k,chr.tounicode or k,description.index,description.name or '-',description.class or '-')
+ -- report_define("t=%s, u=%s, i=%s, n=%s c=%s",k,chr.tounicode or k,description.index,description.name or '-',description.class or '-')
-- end
if tounicode then
local tu = tounicode[index] -- nb: index!
@@ -391,6 +395,9 @@ t.colorscheme = tfmtable.colorscheme
local vn = v.next
if vn then
chr.next = vn
+ --~ if v.vert_variants or v.horiz_variants then
+ --~ report_define("glyph 0x%05X has combination of next, vert_variants and horiz_variants",index)
+ --~ end
else
local vv = v.vert_variants
if vv then
@@ -560,11 +567,11 @@ t.colorscheme = tfmtable.colorscheme
-- can have multiple subfonts
if hasmath then
if trace_defining then
- logs.report("define font","math enabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
+ report_define("math enabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
end
else
if trace_defining then
- logs.report("define font","math disabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
+ report_define("math disabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
end
t.nomath, t.MathConstants = true, nil
end
@@ -573,8 +580,8 @@ t.colorscheme = tfmtable.colorscheme
t.psname = t.fontname or (t.fullname and fonts.names.cleanname(t.fullname))
end
if trace_defining then
- logs.report("define font","used for accesing subfont: '%s'",t.psname or "nopsname")
- logs.report("define font","used for subsetting: '%s'",t.fontname or "nofontname")
+ report_define("used for accessing subfont: '%s'",t.psname or "nopsname")
+ report_define("used for subsetting: '%s'",t.fontname or "nofontname")
end
--~ print(t.fontname,table.serialize(t.MathConstants))
return t, delta
@@ -713,18 +720,18 @@ function tfm.checked_filename(metadata,whatever)
if askedfilename ~= "" then
foundfilename = resolvers.findbinfile(askedfilename,"") or ""
if foundfilename == "" then
- logs.report("fonts","source file '%s' is not found",askedfilename)
+ report_define("source file '%s' is not found",askedfilename)
foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or ""
if foundfilename ~= "" then
- logs.report("fonts","using source file '%s' (cache mismatch)",foundfilename)
+ report_define("using source file '%s' (cache mismatch)",foundfilename)
end
end
elseif whatever then
- logs.report("fonts","no source file for '%s'",whatever)
+ report_define("no source file for '%s'",whatever)
foundfilename = ""
end
metadata.foundfilename = foundfilename
- -- logs.report("fonts","using source file '%s'",foundfilename)
+ -- report_define("using source file '%s'",foundfilename)
end
return foundfilename
end
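
Most hunks in this file (and in the surrounding ones) swap ad hoc logs.report("define font",...) calls for a per-category reporter obtained once via logs.new. The snippet below is a rough, hypothetical stand-in for that factory, meant only to show the calling convention; it is not ConTeXt's real logs implementation.

local format = string.format

local logs = { } -- hypothetical stand-in, not ConTeXt's logs module
function logs.new(category)
    return function(fmt,...)
        print(format("%-14s > " .. fmt,category,...))
    end
end

local report_define = logs.new("define fonts")
report_define("loading tfm file %s at size %s","texnansi-lmr10",655360)
-- define fonts   > loading tfm file texnansi-lmr10 at size 655360
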
diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua
index 508240a3b..c933d6a5f 100644
--- a/tex/context/base/grph-inc.lua
+++ b/tex/context/base/grph-inc.lua
@@ -50,6 +50,8 @@ local trace_programs = false trackers.register("figures.programs", function
local trace_conversion = false trackers.register("figures.conversion", function(v) trace_conversion = v end)
local trace_inclusion = false trackers.register("figures.inclusion", function(v) trace_inclusion = v end)
+local report_graphics = logs.new("graphics")
+
--- some extra img functions ---
local imgkeys = img.keys()
@@ -333,7 +335,7 @@ local function register(askedname,specification)
end
local converter = (newformat ~= format) and figures.converters[format]
if trace_conversion then
- logs.report("figures","checking conversion of '%s': old format '%s', new format '%s', conversion '%s'",
+ report_graphics("checking conversion of '%s': old format '%s', new format '%s', conversion '%s'",
askedname,format,newformat,conversion or "default")
end
if converter then
@@ -375,12 +377,12 @@ local function register(askedname,specification)
local newtime = lfs.attributes(newname,'modification') or 0
if oldtime > newtime then
if trace_conversion then
- logs.report("figures","converting '%s' from '%s' to '%s'",askedname,format,newformat)
+ report_graphics("converting '%s' from '%s' to '%s'",askedname,format,newformat)
end
converter(oldname,newname)
else
if trace_conversion then
- logs.report("figures","no need to convert '%s' from '%s' to '%s'",askedname,format,newformat)
+ report_graphics("no need to convert '%s' from '%s' to '%s'",askedname,format,newformat)
end
end
if io.exists(newname) then
@@ -430,10 +432,17 @@ local function locate(request) -- name, format, cache
end
-- protocol check
local hashed = url.hashed(askedname)
- if hashed and hashed.scheme ~= "file" then
- local foundname = resolvers.findbinfile(askedname)
- if foundname then
- askedname = foundname
+ if hashed then
+ if hashed.scheme == "file" then
+ local path = hashed.path
+ if path and path ~= "" then
+ askedname = path
+ end
+ else
+ local foundname = resolvers.findbinfile(askedname)
+ if foundname then
+ askedname = foundname
+ end
end
end
-- we could use the hashed data instead
@@ -745,11 +754,11 @@ function figures.checkers.generic(data)
figure, data = f or figure, d or data
figures.loaded[hash] = figure
if trace_conversion then
- logs.report("figures","new graphic, hash: %s",hash)
+ report_graphics("new graphic, hash: %s",hash)
end
else
if trace_conversion then
- logs.report("figures","existing graphic, hash: %s",hash)
+ report_graphics("existing graphic, hash: %s",hash)
end
end
if figure then
@@ -820,7 +829,7 @@ function figures.checkers.mov(data)
dr.width, dr.height = width, height
du.width, du.height, du.foundname = width, height, foundname
if trace_inclusion then
- logs.report("figures","including movie '%s': width %s, height %s",foundname,width,height)
+ report_graphics("including movie '%s': width %s, height %s",foundname,width,height)
end
-- we need to push the node.write in between ... we could make a shared helper for this
context.startfoundexternalfigure(width .. "sp",height .. "sp")
@@ -903,7 +912,7 @@ end
local function runprogram(...)
local command = format(...)
if trace_conversion or trace_programs then
- logs.report("figures","running %s",command)
+ report_graphics("running %s",command)
end
os.spawn(command)
end
@@ -977,7 +986,7 @@ figures.programs.convert = {
function gifconverter.pdf(oldname,newname)
local convert = figures.programs.convert
runprogram (
- "convert %s %s",
+ "%s %s %s %s",
convert.command, makeoptions(convert.options), oldname, newname
)
end
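
The last hunk above replaces the hard-coded "convert %s %s" template with "%s %s %s %s" so the program name and its options come from the figures.programs.convert table. A hedged sketch of what the call expands to; makeoptions and the option values are simplified, invented stand-ins.

local format, concat = string.format, table.concat

local convert = { command = "gm convert", options = { "-compress", "zip" } } -- invented values

local function makeoptions(options) -- simplified stand-in for the real helper
    return type(options) == "table" and concat(options," ") or options or ""
end

local function runprogram(...) -- echoes instead of os.spawn for the sketch
    print(format(...))
end

runprogram("%s %s %s %s",convert.command,makeoptions(convert.options),"cow.gif","cow.pdf")
-- gm convert -compress zip cow.gif cow.pdf
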
diff --git a/tex/context/base/grph-u3d.lua b/tex/context/base/grph-u3d.lua
index f3bf17631..04ddd1f4e 100644
--- a/tex/context/base/grph-u3d.lua
+++ b/tex/context/base/grph-u3d.lua
@@ -10,6 +10,8 @@ if not modules then modules = { } end modules ['grph-u3d'] = {
local trace_inclusion = false trackers.register("figures.inclusion", function(v) trace_inclusion = v end)
+local report_graphics = logs.new("graphics")
+
local pdfannotation = nodes.pdfannotation
local todimen = string.todimen
@@ -19,11 +21,11 @@ function figures.checkers.u3d(data)
local dr, du, ds = data.request, data.used, data.status
local width = todimen(dr.width or figures.defaultwidth)
local height = todimen(dr.height or figures.defaultheight)
local foundname = du.fullname
dr.width, dr.height = width, height
du.width, du.height, du.foundname = width, height, foundname
if trace_inclusion then
- logs.report("figures","including u3d '%s': width %s, height %s",foundname,width,height)
+ report_graphics("including u3d '%s': width %s, height %s",foundname,width,height)
end
context.startfoundexternalfigure(width .. "sp",height .. "sp")
context(function()
diff --git a/tex/context/base/l-aux.lua b/tex/context/base/l-aux.lua
index 97063e3bc..aeea79173 100644
--- a/tex/context/base/l-aux.lua
+++ b/tex/context/base/l-aux.lua
@@ -70,11 +70,7 @@ end
function aux.settings_to_hash(str,existing)
if str and str ~= "" then
hash = existing or { }
- if moretolerant then
- lpegmatch(pattern_b_s,str)
- else
- lpegmatch(pattern_a_s,str)
- end
+ lpegmatch(pattern_a_s,str)
return hash
else
return { }
diff --git a/tex/context/base/l-boolean.lua b/tex/context/base/l-boolean.lua
index be7ec7d57..cf8dc0ac8 100644
--- a/tex/context/base/l-boolean.lua
+++ b/tex/context/base/l-boolean.lua
@@ -35,7 +35,7 @@ function toboolean(str,tolerant)
end
end
-function string.is_boolean(str)
+function string.is_boolean(str,default)
if type(str) == "string" then
if str == "true" or str == "yes" or str == "on" or str == "t" then
return true
@@ -43,7 +43,7 @@ function string.is_boolean(str)
return false
end
end
- return nil
+ return default
end
function boolean.alwaystrue()
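
string.is_boolean now takes a default that is returned for strings it cannot interpret, instead of always returning nil. A trimmed standalone copy of the patched function; the false branch condition falls outside the hunk and is assumed here.

local function is_boolean(str,default)
    if type(str) == "string" then
        if str == "true" or str == "yes" or str == "on" or str == "t" then
            return true
        elseif str == "false" or str == "no" or str == "off" or str == "f" then -- assumed branch
            return false
        end
    end
    return default
end

print(is_boolean("yes"))         -- true
print(is_boolean("maybe"))       -- nil
print(is_boolean("maybe",true))  -- true: the new default kicks in
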
diff --git a/tex/context/base/l-dir.lua b/tex/context/base/l-dir.lua
index 2643f538b..64ebbeafc 100644
--- a/tex/context/base/l-dir.lua
+++ b/tex/context/base/l-dir.lua
@@ -307,7 +307,7 @@ else
local str, pth, t = "", "", { ... }
for i=1,#t do
local s = t[i]
- if s ~= "" then
+ if s and s ~= "" then -- we catch nil and false
if str ~= "" then
str = str .. "/" .. s
else
diff --git a/tex/context/base/l-file.lua b/tex/context/base/l-file.lua
index 2bfc07090..b528d9a7d 100644
--- a/tex/context/base/l-file.lua
+++ b/tex/context/base/l-file.lua
@@ -10,45 +10,88 @@ if not modules then modules = { } end modules ['l-file'] = {
file = file or { }
-local concat = table.concat
+local insert, concat = table.insert, table.concat
local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char
local lpegmatch = lpeg.match
+local getcurrentdir = lfs.currentdir
-function file.removesuffix(filename)
- return (gsub(filename,"%.[%a%d]+$",""))
+local function dirname(name,default)
+ return match(name,"^(.+)[/\\].-$") or (default or "")
end
-function file.addsuffix(filename, suffix)
- if not suffix or suffix == "" then
- return filename
- elseif not find(filename,"%.[%a%d]+$") then
- return filename .. "." .. suffix
- else
- return filename
- end
+local function basename(name)
+ return match(name,"^.+[/\\](.-)$") or name
end
-function file.replacesuffix(filename, suffix)
- return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+local function nameonly(name)
+ return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
end
-function file.dirname(name,default)
- return match(name,"^(.+)[/\\].-$") or (default or "")
+local function extname(name,default)
+ return match(name,"^.+%.([^/\\]-)$") or default or ""
end
-function file.basename(name)
- return match(name,"^.+[/\\](.-)$") or name
+local function splitname(name)
+ local n, s = match(name,"^(.+)%.([^/\\]-)$")
+ return n or name, s or ""
end
-function file.nameonly(name)
- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
+file.basename = basename
+file.dirname = dirname
+file.nameonly = nameonly
+file.extname = extname
+file.suffix = extname
+
+function file.removesuffix(filename)
+ return (gsub(filename,"%.[%a%d]+$",""))
end
-function file.extname(name,default)
- return match(name,"^.+%.([^/\\]-)$") or default or ""
+function file.addsuffix(filename, suffix, criterium)
+ if not suffix or suffix == "" then
+ return filename
+ elseif criterium == true then
+ return filename .. "." .. suffix
+ elseif not criterium then
+ local n, s = splitname(filename)
+ if not s or s == "" then
+ return filename .. "." .. suffix
+ else
+ return filename
+ end
+ else
+ local n, s = splitname(filename)
+ if s and s ~= "" then
+ local t = type(criterium)
+ if t == "table" then
+ -- keep if in criterium
+ for i=1,#criterium do
+ if s == criterium[i] then
+ return filename
+ end
+ end
+ elseif t == "string" then
+ -- keep if criterium
+ if s == criterium then
+ return filename
+ end
+ end
+ end
+ return n .. "." .. suffix
+ end
end
-file.suffix = file.extname
+--~ print("1 " .. file.addsuffix("name","new") .. " -> name.new")
+--~ print("2 " .. file.addsuffix("name.old","new") .. " -> name.old")
+--~ print("3 " .. file.addsuffix("name.old","new",true) .. " -> name.old.new")
+--~ print("4 " .. file.addsuffix("name.old","new","new") .. " -> name.new")
+--~ print("5 " .. file.addsuffix("name.old","new","old") .. " -> name.old")
+--~ print("6 " .. file.addsuffix("name.old","new","foo") .. " -> name.new")
+--~ print("7 " .. file.addsuffix("name.old","new",{"foo","bar"}) .. " -> name.new")
+--~ print("8 " .. file.addsuffix("name.old","new",{"old","bar"}) .. " -> name.old")
+
+function file.replacesuffix(filename, suffix)
+ return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+end
--~ function file.join(...)
--~ local pth = concat({...},"/")
@@ -101,7 +144,7 @@ end
--~ print(file.join("//nas-1","/y"))
function file.iswritable(name)
- local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,"."))
+ local a = lfs.attributes(name) or lfs.attributes(dirname(name,"."))
return a and sub(a.permissions,2,2) == "w"
end
@@ -140,31 +183,94 @@ end
-- we can hash them weakly
-function file.collapse_path(str)
+--~ function file.old_collapse_path(str) -- fails on b.c/..
+--~ str = gsub(str,"\\","/")
+--~ if find(str,"/") then
+--~ str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified
+--~ str = gsub(str,"/%./","/")
+--~ local n, m = 1, 1
+--~ while n > 0 or m > 0 do
+--~ str, n = gsub(str,"[^/%.]+/%.%.$","")
+--~ str, m = gsub(str,"[^/%.]+/%.%./","")
+--~ end
+--~ str = gsub(str,"([^/])/$","%1")
+--~ -- str = gsub(str,"^%./","") -- ./xx in qualified
+--~ str = gsub(str,"/%.$","")
+--~ end
+--~ if str == "" then str = "." end
+--~ return str
+--~ end
+--~
+--~ The previous one fails on "a.b/c" so Taco came up with a split based
+--~ variant. After some skyping we got it sort of compatible with the old
+--~ one. After that the anchoring to currentdir was added in a better way.
+--~ Of course there are some optimizations too. Finally we had to deal with
+--~ windows drive prefixes and things like sys://.
+
+function file.collapse_path(str,anchor)
+ if anchor and not find(str,"^/") and not find(str,"^%a:") then
+ str = getcurrentdir() .. "/" .. str
+ end
+ if str == "" or str =="." then
+ return "."
+ elseif find(str,"^%.%.") then
+ str = gsub(str,"\\","/")
+ return str
+ elseif not find(str,"%.") then
+ str = gsub(str,"\\","/")
+ return str
+ end
str = gsub(str,"\\","/")
- if find(str,"/") then
- str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified
- str = gsub(str,"/%./","/")
- local n, m = 1, 1
- while n > 0 or m > 0 do
- str, n = gsub(str,"[^/%.]+/%.%.$","")
- str, m = gsub(str,"[^/%.]+/%.%./","")
+ local starter, rest = match(str,"^(%a+:/*)(.-)$")
+ if starter then
+ str = rest
+ end
+ local oldelements = checkedsplit(str,"/")
+ local newelements = { }
+ local i = #oldelements
+ while i > 0 do
+ local element = oldelements[i]
+ if element == '.' then
+ -- do nothing
+ elseif element == '..' then
+ local n = i -1
+ while n > 0 do
+ local element = oldelements[n]
+ if element ~= '..' and element ~= '.' then
+ oldelements[n] = '.'
+ break
+ else
+ n = n - 1
+ end
+ end
+ if n < 1 then
+ insert(newelements,1,'..')
+ end
+ elseif element ~= "" then
+ insert(newelements,1,element)
end
- str = gsub(str,"([^/])/$","%1")
- -- str = gsub(str,"^%./","") -- ./xx in qualified
- str = gsub(str,"/%.$","")
+ i = i - 1
+ end
+ if #newelements == 0 then
+ return starter or "."
+ elseif starter then
+ return starter .. concat(newelements, '/')
+ elseif find(str,"^/") then
+ return "/" .. concat(newelements,'/')
+ else
+ return concat(newelements, '/')
end
- if str == "" then str = "." end
- return str
end
---~ print(file.collapse_path("/a"))
---~ print(file.collapse_path("a/./b/.."))
---~ print(file.collapse_path("a/aa/../b/bb"))
---~ print(file.collapse_path("a/../.."))
---~ print(file.collapse_path("a/.././././b/.."))
---~ print(file.collapse_path("a/./././b/.."))
---~ print(file.collapse_path("a/b/c/../.."))
+--~ local function test(str)
+--~ print(string.format("%-20s %-15s %-15s",str,file.collapse_path(str),file.collapse_path(str,true)))
+--~ end
+--~ test("a/b.c/d") test("b.c/d") test("b.c/..")
+--~ test("/") test("c:/..") test("sys://..")
+--~ test("") test("./") test(".") test("..") test("./..") test("../..")
+--~ test("a") test("./a") test("/a") test("a/../..")
+--~ test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..")
+--~ test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..")
function file.robustname(str)
return (gsub(str,"[^%a%d%/%-%.\\]+","-"))
diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua
index b107a8e64..05bbebab9 100644
--- a/tex/context/base/l-lpeg.lua
+++ b/tex/context/base/l-lpeg.lua
@@ -48,6 +48,11 @@ patterns.whitespace = patterns.eol + patterns.spacer
patterns.nonwhitespace = 1 - patterns.whitespace
patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191')
+patterns.validutf8 = patterns.utf8^0 * P(-1) * Cc(true) + Cc(false)
+
+patterns.undouble = P('"')/"" * (1-P('"'))^0 * P('"')/""
+patterns.unsingle = P("'")/"" * (1-P("'"))^0 * P("'")/""
+patterns.unspacer = ((patterns.spacer^1)/"")^0
function lpeg.anywhere(pattern) --slightly adapted from website
return P { P(pattern) + 1 * V(1) } -- why so complex?
@@ -163,3 +168,61 @@ local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 6
local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+local cache = { }
+
+function lpeg.stripper(str)
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+end
+
+function lpeg.replacer(t)
+ if #t > 0 then
+ local p
+ for i=1,#t do
+ local ti= t[i]
+ local pp = P(ti[1]) / ti[2]
+ p = (p and p + pp ) or pp
+ end
+ return Cs((p + 1)^0)
+ end
+end
+
+--~ print(utf.check(""))
+--~ print(utf.check("abcde"))
+--~ print(utf.check("abcde\255\123"))
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(P(1)^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+--~ print(1,match(lpeg.firstofsplit(":"),"bc:de"))
+--~ print(2,match(lpeg.firstofsplit(":"),":de")) -- empty
+--~ print(3,match(lpeg.firstofsplit(":"),"bc"))
+--~ print(4,match(lpeg.secondofsplit(":"),"bc:de"))
+--~ print(5,match(lpeg.secondofsplit(":"),"bc:")) -- empty
+--~ print(6,match(lpeg.secondofsplit(":",""),"bc"))
+--~ print(7,match(lpeg.secondofsplit(":"),"bc"))
+--~ print(9,match(lpeg.secondofsplit(":","123"),"bc"))
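
The new lpeg.stripper and lpeg.replacer helpers above come without usage notes; the following is a hedged usage sketch, assuming the patched l-lpeg.lua is loaded in a LuaTeX run.

local lpegmatch = lpeg.match

local strip = lpeg.stripper(" \t")                  -- drop all spaces and tabs
print(lpegmatch(strip,"a b\tc d"))                  -- abcd

local swap = lpeg.replacer { { "yes", "1" }, { "no", "0" } }
print(lpegmatch(swap,"yes or no"))                  -- 1 or 0
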
diff --git a/tex/context/base/l-os.lua b/tex/context/base/l-os.lua
index fba2cd317..0d06c4673 100644
--- a/tex/context/base/l-os.lua
+++ b/tex/context/base/l-os.lua
@@ -8,24 +8,95 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
-local find, format, gsub = string.find, string.format, string.gsub
+local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
local random, ceil = math.random, math.ceil
+local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
-local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush
+-- The following code permits traversing the environment table, at least
+-- in luatex. Internally all environment names are uppercase.
+
+if not os.__getenv__ then
+
+ os.__getenv__ = os.getenv
+ os.__setenv__ = os.setenv
+
+ if os.env then
+
+ local osgetenv = os.getenv
+ local ossetenv = os.setenv
+ local osenv = os.env local _ = osenv.PATH -- initialize the table
+
+ function os.setenv(k,v)
+ if v == nil then
+ v = ""
+ end
+ local K = upper(k)
+ osenv[K] = v
+ ossetenv(K,v)
+ end
+
+ function os.getenv(k)
+ local K = upper(k)
+ local v = osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
+ if v == "" then
+ return nil
+ else
+ return v
+ end
+ end
+
+ else
+
+ local ossetenv = os.setenv
+ local osgetenv = os.getenv
+ local osenv = { }
+
+ function os.setenv(k,v)
+ if v == nil then
+ v = ""
+ end
+ local K = upper(k)
+ osenv[K] = v
+ end
+
+ function os.getenv(k)
+ local K = upper(k)
+ local v = osenv[K] or osgetenv(K) or osgetenv(k)
+ if v == "" then
+ return nil
+ else
+ return v
+ end
+ end
+
+ local function __index(t,k)
+ return os.getenv(k)
+ end
+ local function __newindex(t,k,v)
+ os.setenv(k,v)
+ end
+
+ os.env = { }
+
+ setmetatable(os.env, { __index = __index, __newindex = __newindex } )
+
+ end
+
+end
+
+-- end of environment hack
+
+local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush
function os.execute(...) ioflush() return execute(...) end
function os.spawn (...) ioflush() return spawn (...) end
function os.exec (...) ioflush() return exec (...) end
+function io.popen (...) ioflush() return iopopen(...) end
function os.resultof(command)
- ioflush() -- else messed up logging
local handle = io.popen(command,"r")
- if not handle then
- -- print("unknown command '".. command .. "' in os.resultof")
- return ""
- else
- return handle:read("*all") or ""
- end
+ return handle and handle:read("*all") or ""
end
--~ os.type : windows | unix (new, we already guessed os.platform)
@@ -102,24 +173,6 @@ end
setmetatable(os,osmt)
-if not os.setenv then
-
- -- we still store them but they won't be seen in
- -- child processes although we might pass them some day
- -- using command concatination
-
- local env, getenv = { }, os.getenv
-
- function os.setenv(k,v)
- env[k] = v
- end
-
- function os.getenv(k)
- return env[k] or getenv(k)
- end
-
-end
-
-- we can use HOSTTYPE on some platforms
local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or ""
@@ -240,7 +293,7 @@ elseif name == "kfreebsd" then
-- we sometimes have HOSTTYPE set so let's check that first
local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
if find(architecture,"x86_64") then
- platform = "kfreebsd-64"
+ platform = "kfreebsd-amd64"
else
platform = "kfreebsd-i386"
end
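
The environment hack above exposes variables through an os.env table whose keys are normalized to uppercase on both read and write. A standalone sketch of the same metatable idiom, using a plain backing table instead of the real process environment.

local upper = string.upper

local backing = { } -- stands in for the real environment store

local env = setmetatable({ }, {
    __index    = function(t,k)   return backing[upper(k)] end,
    __newindex = function(t,k,v) backing[upper(k)] = v    end,
})

env.TeXmfHome = "~/texmf"
print(env.TEXMFHOME)  -- ~/texmf
print(env.texmfhome)  -- ~/texmf: lookups are case-insensitive by construction
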
diff --git a/tex/context/base/l-pdfview.lua b/tex/context/base/l-pdfview.lua
index 627477ee8..76923e1fc 100644
--- a/tex/context/base/l-pdfview.lua
+++ b/tex/context/base/l-pdfview.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['l-pdfview'] = {
license = "see context related readme files"
}
-local format, getenv = string.format, os.getenv
+local format, concat = string.format, table.concat
pdfview = pdfview or { }
@@ -32,15 +32,15 @@ else
end
pdfview.METHOD = "MTX_PDFVIEW_METHOD"
-pdfview.method = getenv(pdfview.METHOD) or 'default'
+pdfview.method = resolvers.getenv(pdfview.METHOD) or 'default'
pdfview.method = (opencalls[pdfview.method] and pdfview.method) or 'default'
function pdfview.methods()
- return table.concat(table.sortedkeys(opencalls), " ")
+ return concat(table.sortedkeys(opencalls), " ")
end
function pdfview.status()
- return format("pdfview methods: %s, current method: %s, MTX_PDFVIEW_METHOD=%s",pdfview.methods(),pdfview.method,getenv(pdfview.METHOD) or "<unset>")
+ return format("pdfview methods: %s, current method: %s, MTX_PDFVIEW_METHOD=%s",pdfview.methods(),pdfview.method,resolvers.getenv(pdfview.METHOD) or "<unset>")
end
local openedfiles = { }
diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua
index ee395d0f1..889d25ac6 100644
--- a/tex/context/base/l-table.lua
+++ b/tex/context/base/l-table.lua
@@ -605,7 +605,7 @@ local function serialize(root,name,_handle,_reduce,_noquotes,_hexify)
handle("t={")
end
if root and next(root) then
- do_serialize(root,name,"",0,indexed)
+ do_serialize(root,name,"",0)
end
handle("}")
end
@@ -908,3 +908,14 @@ function table.insert_after_value(t,value,extra)
insert(t,#t+1,extra)
end
+function table.sequenced(t,sep)
+ local s = { }
+ for k, v in next, t do -- indexed?
+ s[#s+1] = k .. "=" .. tostring(v)
+ end
+ return concat(s, sep or " | ")
+end
+
+function table.print(...)
+ print(table.serialize(...))
+end
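
A short usage sketch for the new table.sequenced helper, assuming the patched l-table.lua is loaded; it iterates with next, so the pair order is not guaranteed.

print(table.sequenced({ style = "bold", width = "condensed" }))
-- style=bold | width=condensed   (order may differ)
print(table.sequenced({ a = 1, b = 2 }, ", "))
-- a=1, b=2                       (order may differ)
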
diff --git a/tex/context/base/l-url.lua b/tex/context/base/l-url.lua
index e3e6f8130..43fe73d2b 100644
--- a/tex/context/base/l-url.lua
+++ b/tex/context/base/l-url.lua
@@ -6,9 +6,10 @@ if not modules then modules = { } end modules ['l-url'] = {
license = "see context related readme files"
}
-local char, gmatch, gsub = string.char, string.gmatch, string.gsub
+local char, gmatch, gsub, format, byte = string.char, string.gmatch, string.gsub, string.format, string.byte
+local concat = table.concat
local tonumber, type = tonumber, type
-local lpegmatch = lpeg.match
+local lpegmatch, lpegP, lpegC, lpegR, lpegS, lpegCs, lpegCc = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc
-- from the spec (on the web):
--
@@ -26,22 +27,35 @@ local function tochar(s)
return char(tonumber(s,16))
end
-local colon, qmark, hash, slash, percent, endofstring = lpeg.P(":"), lpeg.P("?"), lpeg.P("#"), lpeg.P("/"), lpeg.P("%"), lpeg.P(-1)
+local colon, qmark, hash, slash, percent, endofstring = lpegP(":"), lpegP("?"), lpegP("#"), lpegP("/"), lpegP("%"), lpegP(-1)
-local hexdigit = lpeg.R("09","AF","af")
-local plus = lpeg.P("+")
-local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar)
+local hexdigit = lpegR("09","AF","af")
+local plus = lpegP("+")
+local nothing = lpegCc("")
+local escaped = (plus / " ") + (percent * lpegC(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("")
-local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("")
-local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("")
-local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("")
-local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0) + lpeg.Cc("")
+local scheme = lpegCs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
+local authority = slash * slash * lpegCs((escaped+(1- slash-qmark-hash))^0) + nothing
+local path = slash * lpegCs((escaped+(1- qmark-hash))^0) + nothing
+local query = qmark * lpegCs((escaped+(1- hash))^0) + nothing
+local fragment = hash * lpegCs((escaped+(1- endofstring))^0) + nothing
local parser = lpeg.Ct(scheme * authority * path * query * fragment)
+lpeg.patterns.urlsplitter = parser
+
+local escapes = { }
+
+for i=0,255 do
+ escapes[i] = format("%%%02X",i)
+end
+
+local escaper = lpeg.Cs((lpegR("09","AZ","az") + lpegS("-./_") + lpegP(1) / escapes)^0)
+
+lpeg.patterns.urlescaper = escaper
+
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
@@ -54,15 +68,27 @@ end
function url.hashed(str)
local s = url.split(str)
local somescheme = s[1] ~= ""
- return {
- scheme = (somescheme and s[1]) or "file",
- authority = s[2],
- path = s[3],
- query = s[4],
- fragment = s[5],
- original = str,
- noscheme = not somescheme,
- }
+ if not somescheme then
+ return {
+ scheme = "file",
+ authority = "",
+ path = str,
+ query = "",
+ fragment = "",
+ original = str,
+ noscheme = true,
+ }
+ else
+ return {
+ scheme = s[1],
+ authority = s[2],
+ path = s[3],
+ query = s[4],
+ fragment = s[5],
+ original = str,
+ noscheme = false,
+ }
+ end
end
function url.hasscheme(str)
@@ -73,15 +99,25 @@ function url.addscheme(str,scheme)
return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str)
end
-function url.construct(hash)
- local fullurl = hash.sheme .. "://".. hash.authority .. hash.path
- if hash.query then
- fullurl = fullurl .. "?".. hash.query
+function url.construct(hash) -- todo: we need to escape !
+ local fullurl = { }
+ local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
+ if scheme and scheme ~= "" then
+ fullurl[#fullurl+1] = scheme .. "://"
+ end
+ if authority and authority ~= "" then
+ fullurl[#fullurl+1] = authority
end
- if hash.fragment then
- fullurl = fullurl .. "?".. hash.fragment
+ if path and path ~= "" then
+ fullurl[#fullurl+1] = "/" .. path
end
- return fullurl
+ if query and query ~= "" then
+ fullurl[#fullurl+1] = "?".. query
+ end
+ if fragment and fragment ~= "" then
+ fullurl[#fullurl+1] = "#".. fragment
+ end
+ return lpegmatch(escaper,concat(fullurl))
end
function url.filename(filename)
@@ -108,12 +144,27 @@ end
--~ print(url.filename("/oeps.txt"))
--~ from the spec on the web (sort of):
---~
---~ function test(str)
---~ print(table.serialize(url.hashed(str)))
+
+--~ local function test(str)
+--~ local t = url.hashed(str)
+--~ t.constructed = url.construct(t)
+--~ print(table.serialize(t))
--~ end
---~
---~ test("%56pass%20words")
+
+--~ test("sys:///./colo-rgb")
+
+--~ test("/data/site/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733/figuur-cow.jpg")
+--~ test("file:///M:/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733")
+--~ test("M:/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733")
+--~ test("file:///q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733")
+--~ test("/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733")
+
+--~ test("file:///cow%20with%20spaces")
+--~ test("file:///cow%20with%20spaces.pdf")
+--~ test("cow%20with%20spaces.pdf")
+--~ test("some%20file")
+--~ test("/etc/passwords")
+--~ test("http://www.myself.com/some%20words.html")
--~ test("file:///c:/oeps.txt")
--~ test("file:///c|/oeps.txt")
--~ test("file:///etc/oeps.txt")
@@ -127,7 +178,6 @@ end
--~ test("tel:+1-816-555-1212")
--~ test("telnet://192.0.2.16:80/")
--~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2")
---~ test("/etc/passwords")
--~ test("http://www.pragma-ade.com/spaced%20name")
--~ test("zip:///oeps/oeps.zip#bla/bla.tex")
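
The rewritten url.hashed short-circuits scheme-less strings to a file url whose path is the unsplit input, which is what the grph-inc.lua change earlier in this diff relies on. A hedged sketch, assuming the patched l-url.lua is loaded (it needs lpeg, as in a LuaTeX run).

local t = url.hashed("cow with spaces.pdf")
print(t.scheme, t.path, t.noscheme)   -- file   cow with spaces.pdf   true

local u = url.hashed("zip:///oeps/oeps.zip#bla/bla.tex")
print(u.scheme, u.path, u.fragment)   -- zip    oeps/oeps.zip    bla/bla.tex
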
diff --git a/tex/context/base/l-utils.lua b/tex/context/base/l-utils.lua
index ebc27b8cf..d03426812 100644
--- a/tex/context/base/l-utils.lua
+++ b/tex/context/base/l-utils.lua
@@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['l-utils'] = {
-- hm, quite unreadable
-local gsub = string.gsub
+local gsub, format = string.gsub, string.format
local concat = table.concat
local type, next = type, next
@@ -16,81 +16,79 @@ if not utils then utils = { } end
if not utils.merger then utils.merger = { } end
if not utils.lua then utils.lua = { } end
-utils.merger.m_begin = "begin library merge"
-utils.merger.m_end = "end library merge"
-utils.merger.pattern =
+utils.report = utils.report or print
+
+local merger = utils.merger
+
+merger.strip_comment = true
+
+local m_begin_merge = "begin library merge"
+local m_end_merge = "end library merge"
+local m_begin_closure = "do -- create closure to overcome 200 locals limit"
+local m_end_closure = "end -- of closure"
+
+local m_pattern =
"%c+" ..
- "%-%-%s+" .. utils.merger.m_begin ..
+ "%-%-%s+" .. m_begin_merge ..
"%c+(.-)%c+" ..
- "%-%-%s+" .. utils.merger.m_end ..
+ "%-%-%s+" .. m_end_merge ..
"%c+"
-function utils.merger._self_fake_()
- return
- "-- " .. "created merged file" .. "\n\n" ..
- "-- " .. utils.merger.m_begin .. "\n\n" ..
- "-- " .. utils.merger.m_end .. "\n\n"
-end
+local m_format =
+ "\n\n-- " .. m_begin_merge ..
+ "\n%s\n" ..
+ "-- " .. m_end_merge .. "\n\n"
+
+local m_faked =
+ "-- " .. "created merged file" .. "\n\n" ..
+ "-- " .. m_begin_merge .. "\n\n" ..
+ "-- " .. m_end_merge .. "\n\n"
-function utils.report(...)
- print(...)
+local function self_fake()
+ return m_faked
end
-utils.merger.strip_comment = true
+local function self_nothing()
+ return ""
+end
-function utils.merger._self_load_(name)
- local f, data = io.open(name), ""
- if f then
- utils.report("reading merge from %s",name)
- data = f:read("*all")
- f:close()
+local function self_load(name)
+ local data = io.loaddata(name) or ""
+ if data == "" then
+ utils.report("merge: unknown file %s",name)
else
- utils.report("unknown file to merge %s",name)
- end
- if data and utils.merger.strip_comment then
- -- saves some 20K
- data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "")
+ utils.report("merge: inserting %s",name)
end
return data or ""
end
-function utils.merger._self_save_(name, data)
+local function self_save(name, data)
if data ~= "" then
- local f = io.open(name,'w')
- if f then
- utils.report("saving merge from %s",name)
- f:write(data)
- f:close()
+ if merger.strip_comment then
+ -- saves some 20K
+ local n = #data
+ data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
+ utils.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data)
end
+ io.savedata(name,data)
+ utils.report("merge: saving %s",name)
end
end
-function utils.merger._self_swap_(data,code)
- if data ~= "" then
- return (gsub(data,utils.merger.pattern, function(s)
- return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end .. "\n\n"
- end, 1))
- else
- return ""
- end
+local function self_swap(data,code)
+ return data ~= "" and (gsub(data,m_pattern, function() return format(m_format,code) end, 1)) or ""
end
---~ stripper:
---~
---~ data = gsub(data,"%-%-~[^\n]*\n","")
---~ data = gsub(data,"\n\n+","\n")
-
-function utils.merger._self_libs_(libs,list)
- local result, f, frozen = { }, nil, false
+local function self_libs(libs,list)
+ local result, f, frozen, foundpath = { }, nil, false, nil
result[#result+1] = "\n"
if type(libs) == 'string' then libs = { libs } end
if type(list) == 'string' then list = { list } end
- local foundpath = nil
for i=1,#libs do
local lib = libs[i]
for j=1,#list do
local pth = gsub(list[j],"\\","/") -- file.clean_path
- utils.report("checking library path %s",pth)
+ utils.report("merge: checking library path %s",pth)
local name = pth .. "/" .. lib
if lfs.isfile(name) then
foundpath = pth
@@ -99,76 +97,58 @@ function utils.merger._self_libs_(libs,list)
if foundpath then break end
end
if foundpath then
- utils.report("using library path %s",foundpath)
+ utils.report("merge: using library path %s",foundpath)
local right, wrong = { }, { }
for i=1,#libs do
local lib = libs[i]
local fullname = foundpath .. "/" .. lib
if lfs.isfile(fullname) then
- -- right[#right+1] = lib
- utils.report("merging library %s",fullname)
- result[#result+1] = "do -- create closure to overcome 200 locals limit"
+ utils.report("merge: using library %s",fullname)
+ right[#right+1] = lib
+ result[#result+1] = m_begin_closure
result[#result+1] = io.loaddata(fullname,true)
- result[#result+1] = "end -- of closure"
+ result[#result+1] = m_end_closure
else
- -- wrong[#wrong+1] = lib
- utils.report("no library %s",fullname)
+ utils.report("merge: skipping library %s",fullname)
+ wrong[#wrong+1] = lib
end
end
if #right > 0 then
- utils.report("merged libraries: %s",concat(right," "))
+ utils.report("merge: used libraries: %s",concat(right," "))
end
if #wrong > 0 then
- utils.report("skipped libraries: %s",concat(wrong," "))
+ utils.report("merge: skipped libraries: %s",concat(wrong," "))
end
else
- utils.report("no valid library path found")
+ utils.report("merge: no valid library path found")
end
return concat(result, "\n\n")
end
-function utils.merger.selfcreate(libs,list,target)
+function merger.selfcreate(libs,list,target)
if target then
- utils.merger._self_save_(
- target,
- utils.merger._self_swap_(
- utils.merger._self_fake_(),
- utils.merger._self_libs_(libs,list)
- )
- )
+ self_save(target,self_swap(self_fake(),self_libs(libs,list)))
end
end
-function utils.merger.selfmerge(name,libs,list,target)
- utils.merger._self_save_(
- target or name,
- utils.merger._self_swap_(
- utils.merger._self_load_(name),
- utils.merger._self_libs_(libs,list)
- )
- )
+function merger.selfmerge(name,libs,list,target)
+ self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
end
-function utils.merger.selfclean(name)
- utils.merger._self_save_(
- name,
- utils.merger._self_swap_(
- utils.merger._self_load_(name),
- ""
- )
- )
+function merger.selfclean(name)
+ self_save(name,self_swap(self_load(name),self_nothing()))
end
-function utils.lua.compile(luafile, lucfile, cleanup, strip) -- defaults: cleanup=false strip=true
- -- utils.report("compiling",luafile,"into",lucfile)
+function utils.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+ utils.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quote(lucfile) .. " " .. string.quote(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = (os.spawn("texluac " .. command) == 0) or (os.spawn("luac " .. command) == 0)
+ local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- -- utils.report("removing",luafile)
+ utils.report("lua: removing %s",luafile)
os.remove(luafile)
end
return done
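
The refactored merger keeps the same replace-between-markers approach, now through the local m_pattern/m_format pair. A standalone sketch of that swap step, with the marker constants copied from the patch and an invented payload string.

local format, gsub = string.format, string.gsub

local m_begin_merge = "begin library merge"
local m_end_merge   = "end library merge"

local m_pattern = "%c+" .. "%-%-%s+" .. m_begin_merge .. "%c+(.-)%c+" .. "%-%-%s+" .. m_end_merge .. "%c+"
local m_format  = "\n\n-- " .. m_begin_merge .. "\n%s\n" .. "-- " .. m_end_merge .. "\n\n"

local stub = "-- created merged file\n\n-- " .. m_begin_merge .. "\n\n-- " .. m_end_merge .. "\n\n"

local merged = gsub(stub,m_pattern,function() return format(m_format,"print('merged payload')") end,1)
print(merged) -- the payload now sits between the two markers
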
diff --git a/tex/context/base/lang-ini.lua b/tex/context/base/lang-ini.lua
index 239e5390c..501e56448 100644
--- a/tex/context/base/lang-ini.lua
+++ b/tex/context/base/lang-ini.lua
@@ -16,6 +16,8 @@ local lpegmatch = lpeg.match
local trace_patterns = false trackers.register("languages.patterns", function(v) trace_patterns = v end)
+local report_languages = logs.new("languages")
+
languages = languages or {}
languages.version = 1.009
languages.hyphenation = languages.hyphenation or { }
@@ -52,22 +54,22 @@ local command = lpeg.P("\\patterns")
local parser = (1-command)^0 * command * content
local function filterpatterns(filename)
- if file.extname(filename) == "rpl" then
- return io.loaddata(resolvers.find_file(filename)) or ""
- else
+--~ if file.extname(filename) == "rpl" then
+--~ return io.loaddata(resolvers.find_file(filename)) or ""
+--~ else
return lpegmatch(parser,io.loaddata(resolvers.find_file(filename)) or "")
- end
+--~ end
end
local command = lpeg.P("\\hyphenation")
local parser = (1-command)^0 * command * content
local function filterexceptions(filename)
- if file.extname(filename) == "rhl" then
- return io.loaddata(resolvers.find_file(filename)) or ""
- else
- return lpegmatch(parser,io.loaddata(resolvers.find_file(filename)) or {}) -- "" ?
- end
+--~ if file.extname(filename) == "rhl" then
+--~ return io.loaddata(resolvers.find_file(filename)) or ""
+--~ else
+ return lpegmatch(parser,io.loaddata(resolvers.find_file(filename)) or "") -- "" ?
+--~ end
end
local function record(tag)
@@ -100,12 +102,12 @@ local function loadthem(tag, filename, filter, target)
local ok = fullname ~= ""
if ok then
if trace_patterns then
- logs.report("languages","filtering %s for language '%s' from '%s'",target,tag,fullname)
+ report_languages("filtering %s for language '%s' from '%s'",target,tag,fullname)
end
- lang[target](data,filterpatterns(fullname))
+ lang[target](data,filter(fullname) or "")
else
if trace_patterns then
- logs.report("languages","no %s for language '%s' in '%s'",target,tag,filename or "?")
+ report_languages("no %s for language '%s' in '%s'",target,tag,filename or "?")
end
lang[target](data,"")
end
@@ -119,7 +121,35 @@ function languages.hyphenation.loadpatterns(tag, patterns)
end
function languages.hyphenation.loadexceptions(tag, exceptions)
- return loadthem(tag, patterns, filterexceptions, "exceptions")
+ return loadthem(tag, exceptions, filterexceptions, "exceptions")
+end
+
+function languages.hyphenation.loaddefinitions(tag, definitions)
+ statistics.starttiming(languages)
+ local data = record(tag)
+ local fullname = (definitions and definitions ~= "" and resolvers.find_file(definitions)) or ""
+ local patterndata, exceptiondata, ok = "", "", fullname ~= ""
+ if ok then
+ if trace_patterns then
+ report_languages("loading definitions for language '%s' from '%s'",tag,fullname)
+ end
+ local defs = dofile(fullname) -- use regular loader instead
+ if defs then -- todo: version test
+ patterndata = defs.patterns and defs.patterns.data or ""
+ exceptiondata = defs.exceptions and defs.exceptions.data or ""
+ else
+ report_languages("invalid definitions for language '%s' in '%s'",tag,definitions or "?")
+ end
+ else
+ if trace_patterns then
+ report_languages("no definitions for language '%s' in '%s'",tag,definitions or "?")
+ end
+ end
+ lang.patterns (data,patterndata)
+ lang.exceptions(data,exceptiondata)
+ langdata[tag] = data
+ statistics.stoptiming(languages)
+ return ok
end
function languages.hyphenation.exceptions(tag, ...)
@@ -194,14 +224,18 @@ end
languages.tolang = tolang
-function languages.register(tag,parent,patterns,exceptions)
+function languages.register(tag,parent,patterns,exceptions,definitions)
parent = parent or tag
+ patterns = patterns or format("lang-%s.pat",parent)
+ exceptions = exceptions or format("lang-%s.hyp",parent)
+ definitions = definitions or format("lang-%s.lua",parent)
registered[tag] = {
- parent = parent,
- patterns = patterns or format("lang-%s.pat",parent),
- exceptions = exceptions or format("lang-%s.hyp",parent),
- loaded = false,
- number = 0,
+ parent = parent,
+ patterns = patterns,
+ exceptions = exceptions,
+ definitions = definitions,
+ loaded = false,
+ number = 0,
}
end
@@ -244,15 +278,22 @@ function languages.enable(tags)
if languages.share and number > 0 then
l.number = number
else
- -- we assume the same filenames
l.number = languages.hyphenation.define(tag)
- languages.hyphenation.loadpatterns(tag,l.patterns)
- languages.hyphenation.loadexceptions(tag,l.exceptions)
+ local ok = l.definitions and languages.hyphenation.loaddefinitions(tag,l.definitions)
+ if not ok then
+ -- We will keep this for a while. The lua way is not faster but suits
+ -- the current context mkiv approach a bit better. It's called progress.
+ if trace_patterns then
+ report_languages("falling back on tex files for language with tag %s",tag)
+ end
+ languages.hyphenation.loadpatterns(tag,l.patterns)
+ languages.hyphenation.loadexceptions(tag,l.exceptions)
+ end
numbers[l.number] = tag
end
l.loaded = true
if trace_patterns then
- logs.report("languages","assigning number %s",l.number)
+ report_languages("assigning number %s",l.number)
end
end
if l.number > 0 then
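
The new languages.hyphenation.loaddefinitions expects a lang-<tag>.lua file returning a table with patterns.data and exceptions.data fields, as read via defs.patterns.data and defs.exceptions.data above. A hypothetical minimal file of that shape; the real lang-*.lua files ship with the distribution and carry more metadata.

-- hypothetical lang-xx.lua, only the fields read by loaddefinitions
return {
    version    = 1.001,                   -- assumed; the loader still notes "todo: version test"
    patterns   = { data = ".ab4c c3de" }, -- invented pattern data
    exceptions = { data = "ta-ble" },     -- invented exception data
}
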
diff --git a/tex/context/base/lang-ini.mkiv b/tex/context/base/lang-ini.mkiv
index 45bb71b85..266370ec7 100644
--- a/tex/context/base/lang-ini.mkiv
+++ b/tex/context/base/lang-ini.mkiv
@@ -146,7 +146,8 @@
"#1",
"#2",
"\truefilename{\f!languageprefix#2.\f!patternsextension}",
- "\truefilename{\f!languageprefix#2.\f!hyphensextension }")
+ "\truefilename{\f!languageprefix#2.\f!hyphensextension}",
+ "\truefilename{\f!languageprefix#2.lua}")
}}
\def\doloadlanguagefiles#1%
diff --git a/tex/context/base/lang-wrd.lua b/tex/context/base/lang-wrd.lua
index 095e44443..c2b5ff6ac 100644
--- a/tex/context/base/lang-wrd.lua
+++ b/tex/context/base/lang-wrd.lua
@@ -10,6 +10,8 @@ local utf = unicode.utf8
local lower, utfchar = string.lower, utf.char
local lpegmatch = lpeg.match
+local report_languages = logs.new("languages")
+
languages.words = languages.words or { }
local words = languages.words
@@ -57,7 +59,7 @@ function words.load(tag,filename)
wordsdata[tag] = list
statistics.stoptiming(languages)
else
- logs.report("languages","missing words file '%s'",filename)
+ report_languages("missing words file '%s'",filename)
end
end
@@ -183,7 +185,7 @@ words.used = list
function words.dump_used_words(name)
if dump then
- logs.report("languages","saving list of used words in '%s'",name)
+ report_languages("saving list of used words in '%s'",name)
io.savedata(name,table.serialize(list))
end
end
diff --git a/tex/context/base/lpdf-ano.lua b/tex/context/base/lpdf-ano.lua
index e9e67e163..f6392fd37 100644
--- a/tex/context/base/lpdf-ano.lua
+++ b/tex/context/base/lpdf-ano.lua
@@ -13,6 +13,10 @@ local trace_references = false trackers.register("references.references", f
local trace_destinations = false trackers.register("references.destinations", function(v) trace_destinations = v end)
local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
+local report_references = logs.new("references")
+local report_destinations = logs.new("destinations")
+local report_bookmarks = logs.new("bookmarks")
+
local variables = interfaces.variables
local constants = interfaces.constants
@@ -231,22 +235,16 @@ end
function nodeinjections.reference(width,height,depth,prerolled)
if prerolled then
- if swapdir then
- width = - width
- end
if trace_references then
- logs.report("references","w=%s, h=%s, d=%s, a=%s",width,height,depth,prerolled)
+ report_references("w=%s, h=%s, d=%s, a=%s",width,height,depth,prerolled)
end
return pdfannotation(width,height,depth,prerolled)
end
end
function nodeinjections.destination(width,height,depth,name,view)
- if swapdir then
- width = - width
- end
if trace_destinations then
- logs.report("destinations","w=%s, h=%s, d=%s, n=%s, v=%s",width,height,depth,name,view or "no view")
+ report_destinations("w=%s, h=%s, d=%s, n=%s, v=%s",width,height,depth,name,view or "no view")
end
return pdfdestination(width,height,depth,name,view)
end
@@ -267,7 +265,7 @@ runners["inner"] = function(var,actions)
end
runners["inner with arguments"] = function(var,actions)
- logs.report("references","todo: inner with arguments")
+ report_references("todo: inner with arguments")
return false
end
@@ -287,7 +285,7 @@ runners["special outer with operation"] = function(var,actions)
end
runners["special outer"] = function(var,actions)
- logs.report("references","todo: special outer")
+ report_references("todo: special outer")
return false
end
@@ -297,22 +295,22 @@ runners["special"] = function(var,actions)
end
runners["outer with inner with arguments"] = function(var,actions)
- logs.report("references","todo: outer with inner with arguments")
+ report_references("todo: outer with inner with arguments")
return false
end
runners["outer with special and operation and arguments"] = function(var,actions)
- logs.report("references","todo: outer with special and operation and arguments")
+ report_references("todo: outer with special and operation and arguments")
return false
end
runners["outer with special"] = function(var,actions)
- logs.report("references","todo: outer with special")
+ report_references("todo: outer with special")
return false
end
runners["outer with special and operation"] = function(var,actions)
- logs.report("references","todo: outer with special and operation")
+ report_references("todo: outer with special and operation")
return false
end
@@ -528,7 +526,7 @@ local function build(levels,start,parent,method)
local level, title, reference, open = li[1], li[2], li[3], li[4]
if level == startlevel then
if trace_bookmarks then
- logs.report("bookmark","%3i %s%s %s",reference.realpage,rep(" ",level-1),(open and "+") or "-",title)
+ report_bookmarks("%3i %s%s %s",reference.realpage,rep(" ",level-1),(open and "+") or "-",title)
end
local prev = child
child = pdfreserveobject()
diff --git a/tex/context/base/lpdf-fld.lua b/tex/context/base/lpdf-fld.lua
index c034aec6c..893962b0e 100644
--- a/tex/context/base/lpdf-fld.lua
+++ b/tex/context/base/lpdf-fld.lua
@@ -15,6 +15,8 @@ local lpegmatch = lpeg.match
local trace_fields = false trackers.register("widgets.fields", function(v) trace_fields = v end)
+local report_fields = logs.new("fields")
+
local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
local variables = interfaces.variables
@@ -255,7 +257,7 @@ local function fieldstates(specification,forceyes,values,default)
return
end
local v = aux.settings_to_array(values)
- local yes, off
+ local yes, off, yesn, yesr, yesd, offn, offr, offd
if #v == 1 then
yes, off = v[1], v[1]
else
@@ -423,7 +425,7 @@ function codeinjections.definefield(specification)
local kind = specification.kind
if not kind then
if trace_fields then
- logs.report("fields","invalid definition of '%s': unknown type",n)
+ report_fields("invalid definition of '%s': unknown type",n)
end
elseif kind == "radio" then
local values = specification.values
@@ -434,10 +436,10 @@ function codeinjections.definefield(specification)
end
fields[n] = specification
if trace_fields then
- logs.report("fields","defining '%s' as radio",n or "?")
+ report_fields("defining '%s' as radio",n or "?")
end
elseif trace_fields then
- logs.report("fields","invalid definition of radio '%s': missing values",n)
+ report_fields("invalid definition of radio '%s': missing values",n)
end
elseif kind == "sub" then
-- not in main field list !
@@ -449,16 +451,16 @@ function codeinjections.definefield(specification)
end
if trace_fields then
local p = radios[n] and radios[n].parent
- logs.report("fields","defining '%s' as sub of radio '%s'",n or "?",p or "?")
+ report_fields("defining '%s' as sub of radio '%s'",n or "?",p or "?")
end
elseif trace_fields then
- logs.report("fields","invalid definition of radio sub '%s': no parent",n)
+ report_fields("invalid definition of radio sub '%s': no parent",n)
end
predefinesymbols(specification)
elseif kind == "text" or kind == "line" then
fields[n] = specification
if trace_fields then
- logs.report("fields","defining '%s' as %s",n,kind)
+ report_fields("defining '%s' as %s",n,kind)
end
if specification.values ~= "" and specification.default == "" then
specification.default, specification.values = specification.values, nil
@@ -466,12 +468,12 @@ function codeinjections.definefield(specification)
else
fields[n] = specification
if trace_fields then
- logs.report("fields","defining '%s' as %s",n,kind)
+ report_fields("defining '%s' as %s",n,kind)
end
predefinesymbols(specification)
end
elseif trace_fields then
- logs.report("fields","invalid definition of '%s': already defined",n)
+ report_fields("invalid definition of '%s': already defined",n)
end
end
@@ -479,22 +481,22 @@ function codeinjections.clonefield(specification)
local p, c, v = specification.parent, specification.children, specification.variant
if not p or not c then
if trace_fields then
- logs.report("fields","invalid clone: children: '%s', parent '%s', variant: '%s'",p or "?",c or "?", v or "?")
+ report_fields("invalid clone: children: '%s', parent '%s', variant: '%s'",p or "?",c or "?", v or "?")
end
else
for n in gmatch(c,"[^, ]+") do
local f, r, c, x = fields[n], radios[n], clones[n], fields[p]
if f or r or c then
if trace_fields then
- logs.report("fields","already cloned: child: '%s', parent '%s', variant: '%s'",p or "?",n or "?", v or "?")
+ report_fields("already cloned: child: '%s', parent '%s', variant: '%s'",p or "?",n or "?", v or "?")
end
elseif x then
if trace_fields then
- logs.report("fields","invalid clone: child: '%s', variant: '%s', no parent",n or "?", v or "?")
+ report_fields("invalid clone: child: '%s', variant: '%s', no parent",n or "?", v or "?")
end
else
if trace_fields then
- logs.report("fields","cloning: child: '%s', parent '%s', variant: '%s'",p or "?",n or "?", v or "?")
+ report_fields("cloning: child: '%s', parent '%s', variant: '%s'",p or "?",n or "?", v or "?")
end
clones[n] = specification
predefinesymbols(specification)
@@ -601,7 +603,7 @@ local methods = { }
function codeinjections.typesetfield(name,specification)
local field = fields[name] or radios[name] or clones[name]
if not field then
- logs.report("fields", "unknown child '%s'",name)
+ report_fields( "unknown child '%s'",name)
-- unknown field
return
end
@@ -613,7 +615,7 @@ function codeinjections.typesetfield(name,specification)
if method then
method(name,specification,variant)
else
- logs.report("fields", "unknown method '%s' for child '%s'",field.kind,name)
+ report_fields( "unknown method '%s' for child '%s'",field.kind,name)
end
end
@@ -638,12 +640,12 @@ end
function methods.line(name,specification,variant,extras)
local field = fields[name]
if variant == "copy" or variant == "clone" then
- logs.report("fields","todo: clones of text fields")
+ report_fields("todo: clones of text fields")
end
local kind = field.kind
if not field.pobj then
if trace_fields then
- logs.report("fields","using parent text '%s'",name)
+ report_fields("using parent text '%s'",name)
end
if extras then
enhance(specification,extras)
@@ -669,7 +671,7 @@ function methods.line(name,specification,variant,extras)
end
specification = field.specification or { } -- todo: radio spec
if trace_fields then
- logs.report("fields","using child text '%s'",name)
+ report_fields("using child text '%s'",name)
end
local d = pdfdictionary {
Subtype = pdf_widget,
@@ -692,13 +694,13 @@ end
function methods.choice(name,specification,variant,extras)
local field = fields[name]
if variant == "copy" or variant == "clone" then
- logs.report("fields","todo: clones of choice fields")
+ report_fields("todo: clones of choice fields")
end
local kind = field.kind
local d
if not field.pobj then
if trace_fields then
- logs.report("fields","using parent choice '%s'",name)
+ report_fields("using parent choice '%s'",name)
end
if extras then
enhance(specification,extras)
@@ -718,7 +720,7 @@ function methods.choice(name,specification,variant,extras)
end
specification = field.specification or { }
if trace_fields then
- logs.report("fields","using child choice '%s'",name)
+ report_fields("using child choice '%s'",name)
end
local d = pdfdictionary {
Subtype = pdf_widget,
@@ -746,13 +748,13 @@ function methods.check(name,specification,variant)
-- contrary to radio there is no way to associate them
local field = fields[name]
if variant == "copy" or variant == "clone" then
- logs.report("fields","todo: clones of check fields")
+ report_fields("todo: clones of check fields")
end
local kind = field.kind
local appearance, default = fieldstates(field,true)
if not field.pobj then
if trace_fields then
- logs.report("fields","using parent check '%s'",name)
+ report_fields("using parent check '%s'",name)
end
local d = pdfdictionary {
Subtype = pdf_widget,
@@ -773,7 +775,7 @@ function methods.check(name,specification,variant)
end
specification = field.specification or { } -- todo: radio spec
if trace_fields then
- logs.report("fields","using child check '%s'",name)
+ report_fields("using child check '%s'",name)
end
local d = pdfdictionary {
Subtype = pdf_widget,
@@ -794,12 +796,12 @@ end
function methods.push(name,specification,variant)
local field = fields[name]
if variant == "copy" or variant == "clone" then
- logs.report("fields","todo: clones of push fields")
+ report_fields("todo: clones of push fields")
end
local kind = field.kind
if not field.pobj then
if trace_fields then
- logs.report("fields","using parent push '%s'",name)
+ report_fields("using parent push '%s'",name)
end
enhance(specification,"PushButton")
local d = pdfdictionary {
@@ -818,7 +820,7 @@ function methods.push(name,specification,variant)
end
specification = field.specification or { } -- todo: radio spec
if trace_fields then
- logs.report("fields","using child push '%s'",name)
+ report_fields("using child push '%s'",name)
end
local d = pdfdictionary {
Subtype = pdf_widget,
@@ -851,7 +853,7 @@ function methods.sub(name,specification,variant)
local default = radiodefault(parent,field)
if not parent.pobj then
if trace_fields then
- logs.report("fields","using parent '%s' of radio '%s' with values '%s' and default '%s'",parent.name,name,parent.values or "?",parent.default or "?")
+ report_fields("using parent '%s' of radio '%s' with values '%s' and default '%s'",parent.name,name,parent.values or "?",parent.default or "?")
end
local specification = parent.specification or { }
-- enhance(specification,"Radio,RadiosInUnison")
@@ -868,7 +870,7 @@ function methods.sub(name,specification,variant)
save_parent(parent,specification,d)
end
if trace_fields then
- logs.report("fields","using child radio '%s' with values '%s'",name,values or "?")
+ report_fields("using child radio '%s' with values '%s'",name,values or "?")
end
local d = pdfdictionary {
Subtype = pdf_widget,
diff --git a/tex/context/base/lpdf-ini.lua b/tex/context/base/lpdf-ini.lua
index e0ffd4052..e0cc98c27 100644
--- a/tex/context/base/lpdf-ini.lua
+++ b/tex/context/base/lpdf-ini.lua
@@ -21,6 +21,8 @@ local trace_resources = false trackers.register("backend.resources", function
local trace_objects = false trackers.register("backend.objects", function(v) trace_objects = v end)
local trace_detail = false trackers.register("backend.detail", function(v) trace_detail = v end)
+local report_backends = logs.new("backends")
+
lpdf = lpdf or { }
local function tosixteen(str)
@@ -185,18 +187,18 @@ local tostring_v = function(t)
end
end
-local function value_x(t) return t end -- the call is experimental
-local function value_s(t,key) return t[1] end -- the call is experimental
-local function value_u(t,key) return t[1] end -- the call is experimental
-local function value_n(t,key) return t[1] end -- the call is experimental
-local function value_c(t) return sub(t[1],2) end -- the call is experimental
-local function value_d(t) return tostring_d(t,true,key) end -- the call is experimental
-local function value_a(t) return tostring_a(t,true,key) end -- the call is experimental
-local function value_z() return nil end -- the call is experimental
-local function value_t(t) return t.value or true end -- the call is experimental
-local function value_f(t) return t.value or false end -- the call is experimental
-local function value_r() return t[1] end -- the call is experimental
-local function value_v() return t[1] end -- the call is experimental
+local function value_x(t) return t end -- the call is experimental
+local function value_s(t,key) return t[1] end -- the call is experimental
+local function value_u(t,key) return t[1] end -- the call is experimental
+local function value_n(t,key) return t[1] end -- the call is experimental
+local function value_c(t) return sub(t[1],2) end -- the call is experimental
+local function value_d(t) return tostring_d(t,true) end -- the call is experimental
+local function value_a(t) return tostring_a(t,true) end -- the call is experimental
+local function value_z() return nil end -- the call is experimental
+local function value_t(t) return t.value or true end -- the call is experimental
+local function value_f(t) return t.value or false end -- the call is experimental
+local function value_r() return t[1] end -- the call is experimental
+local function value_v() return t[1] end -- the call is experimental
local function add_x(t,k,v) rawset(t,k,tostring(v)) end
@@ -333,10 +335,10 @@ function lpdf.reserveobject(name)
if name then
names[name] = r
if trace_objects then
- logs.report("backends", "reserving object number %s under name '%s'",r,name)
+ report_backends("reserving object number %s under name '%s'",r,name)
end
elseif trace_objects then
- logs.report("backends", "reserving object number %s",r)
+ report_backends("reserving object number %s",r)
end
return r
end
@@ -349,25 +351,25 @@ function lpdf.flushobject(name,data)
if name then
if trace_objects then
if trace_detail then
- logs.report("backends", "flushing object data to reserved object with name '%s' -> %s",name,tostring(data))
+ report_backends("flushing object data to reserved object with name '%s' -> %s",name,tostring(data))
else
- logs.report("backends", "flushing object data to reserved object with name '%s'",name)
+ report_backends("flushing object data to reserved object with name '%s'",name)
end
end
return pdfimmediateobj(name,tostring(data))
else
if trace_objects then
if trace_detail then
- logs.report("backends", "flushing object data to reserved object with number %s -> %s",name,tostring(data))
+ report_backends("flushing object data to reserved object with number %s -> %s",name,tostring(data))
else
- logs.report("backends", "flushing object data to reserved object with number %s",name)
+ report_backends("flushing object data to reserved object with number %s",name)
end
end
return pdfimmediateobj(tostring(data))
end
else
if trace_objects and trace_detail then
- logs.report("backends", "flushing object data -> %s",tostring(name))
+ report_backends("flushing object data -> %s",tostring(name))
end
return pdfimmediateobj(tostring(name))
end
@@ -463,7 +465,7 @@ local function set(where,f,when,what)
local w = where[when]
w[#w+1] = f
if trace_finalizers then
- logs.report("backend","%s set: [%s,%s]",what,when,#w)
+ report_backends("%s set: [%s,%s]",what,when,#w)
end
end
@@ -472,7 +474,7 @@ local function run(where,what)
local w = where[i]
for j=1,#w do
if trace_finalizers then
- logs.report("backend","%s finalizer: [%s,%s]",what,i,j)
+ report_backends("%s finalizer: [%s,%s]",what,i,j)
end
w[j]()
end
@@ -499,22 +501,28 @@ function lpdf.finalizedocument()
if not environment.initex then
run(documentfinalizers,"document")
function lpdf.finalizedocument()
- logs.report("backend","serious error: the document is finalized multiple times")
+ report_backends("serious error: the document is finalized multiple times")
function lpdf.finalizedocument() end
end
end
end
+if callbacks.known("finish_pdffile") then
+ callbacks.register("finish_pdffile",function() if not environment.initex then run(documentfinalizers,"document") end end)
+ function lpdf.finalizedocument() end
+end
+
+
-- some minimal tracing, handy for checking the order
local function trace_set(what,key)
if trace_resources then
- logs.report("backend", "setting key '%s' in '%s'",key,what)
+ report_backends("setting key '%s' in '%s'",key,what)
end
end
local function trace_flush(what)
if trace_resources then
- logs.report("backend", "flushing '%s'",what)
+ report_backends("flushing '%s'",what)
end
end
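-- Illustrative sketch, not part of the committed code: the set/run finalizer
-- pattern used above, shown standalone. Functions are collected per "when"
-- slot and executed in registration order when the group is flushed.

local finalizers = { }                 -- e.g. pagefinalizers or documentfinalizers

local function set(where,f,when)
    local w = where[when]
    if not w then
        w = { }
        where[when] = w
    end
    w[#w+1] = f
end

local function run(where)
    for i=1,#where do
        local w = where[i]
        for j=1,#w do
            w[j]()                     -- call each registered finalizer in order
        end
    end
end

set(finalizers,function() print("flush resources") end,1)
set(finalizers,function() print("flush catalog")   end,2)
run(finalizers)                        -- "flush resources", then "flush catalog"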
diff --git a/tex/context/base/lpdf-pdx.mkiv b/tex/context/base/lpdf-pdx.mkiv
index ffb7f5269..87e903de6 100644
--- a/tex/context/base/lpdf-pdx.mkiv
+++ b/tex/context/base/lpdf-pdx.mkiv
@@ -28,7 +28,7 @@
% - ProfileCS (GRAY,RGB,CMYK)
% - ICCVersion (bytes 8..11 from the header of the ICC profile, as a hex string)
-\registerctxluafile{lpdf-pdx} {}
+\registerctxluafile{lpdf-pdx}{1.001}
% \def\embedICCprofile#1#2% colorspace, name
% {\ctxlua{backends.codeinjections.addiccprofile("#1","#2")}}
diff --git a/tex/context/base/luat-bas.mkiv b/tex/context/base/luat-bas.mkiv
index 581a5d95a..9b5dcbc5b 100644
--- a/tex/context/base/luat-bas.mkiv
+++ b/tex/context/base/luat-bas.mkiv
@@ -11,28 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% \writestatus{loading}{ConTeXt Lua Macros / Basic Lua Libraries}
-
-%D This will move cq. become configurable. The XML like output is just
-%D an example.
-
-% todo \let\normaleverytoks\everytoks \newtoks\everytoke \normaleverytoks{\the\everytoks}
-
-\chardef\statuswidth=15
-\chardef\statuswrite=16
-
-\newtoks\everywritestring
-
-\def\writedirect {\immediate\write\statuswrite}
-\def\writeline {\writedirect{}}
-\def\writestring#1{\begingroup\the\everywritestring\writedirect{#1}\endgroup}
-
-\ifx\normalwritestatus\undefined \def\normalwritestatus#1#2{\writedirect{#1 : #2}} \fi
-
-% Because all libs are also on bytecodes we can start without stub. However,
-% some initializations need to take place before the \TEX\ engine itself
-% kicks in, especially memory settings and so. In due time we might make the
-% stub smaller and just create a configuration startup file.
+\writestatus{loading}{ConTeXt Lua Macros / Basic Lua Libraries}
\registerctxluafile{l-string} {1.001}
\registerctxluafile{l-lpeg} {1.001}
diff --git a/tex/context/base/luat-cbk.lua b/tex/context/base/luat-cbk.lua
index 3cb63ad6e..adf90f2d4 100644
--- a/tex/context/base/luat-cbk.lua
+++ b/tex/context/base/luat-cbk.lua
@@ -6,12 +6,15 @@ if not modules then modules = { } end modules ['luat-cbk'] = {
license = "see context related readme files"
}
-local insert, remove, find = table.insert, table.remove, string.find
+local insert, remove, find, format = table.insert, table.remove, string.find, string.format
local collectgarbage, type, next = collectgarbage, type, next
local round = math.round
local trace_checking = false trackers.register("memory.checking", function(v) trace_checking = v end)
+local report_callbacks = logs.new("callbacks")
+local report_memory = logs.new("memory")
+
--[[ldx--
<p>Callbacks are the real asset of <l n='luatex'/>. They permit you to hook
your own code into the <l n='tex'/> engine. Here we implement a few handy
@@ -27,14 +30,64 @@ functions.</p>
--ldx]]--
local trace_callbacks = false trackers.register("system.callbacks", function(v) trace_callbacks = v end)
+local trace_calls = false -- only used when analyzing performance and initializations
+
+local register_callback, find_callback, list_callbacks = callback.register, callback.find, callback.list
+local frozen, stack, list = { }, { }, callbacks.list
+
+if not callbacks.list then -- otherwise counters get reset
+
+ list = list_callbacks()
+
+ for k, _ in next, list do
+ list[k] = 0
+ end
+
+ callbacks.list = list
+
+end
+
+local delayed = table.tohash {
+ "buildpage_filter",
+}
+
-local register_callback, find_callback = callback.register, callback.find
-local frozen, stack = { }, { }
+if not callback.original_register_callback then
-callback.original_register_callback = register_callback
+ callback.original_register_callback = register_callback
+
+ local original_register_callback = register_callback
+
+ if trace_calls then
+
+ local functions = { }
+
+ register_callback = function(name,func)
+ if type(func) == "function" then
+ if functions[name] then
+ functions[name] = func
+ return find_callback(name)
+ else
+ functions[name] = func
+ local cnuf = function(...)
+ list[name] = list[name] + 1
+ return functions[name](...)
+ end
+ return original_register_callback(name,cnuf)
+ end
+ else
+ return original_register_callback(name,func)
+ end
+ end
+
+ end
+
+end
+
+callback.register = register_callback
local function frozen_message(what,name)
- logs.report("callbacks","not %s frozen '%s' (%s)",what,name,frozen[name])
+ report_callbacks("not %s frozen '%s' (%s)",what,name,frozen[name])
end
local function frozen_callback(name)
@@ -52,14 +105,17 @@ local function state(name)
end
end
+function callbacks.known(name)
+ return list[name]
+end
+
function callbacks.report()
- local list = callback.list()
- for name, func in table.sortedhash(list) do
+ for name, _ in table.sortedhash(list) do
local str = frozen[name]
if str then
- logs.report("callbacks","%s: %s -> %s",state(name),name,str)
+ report_callbacks("%s: %s -> %s",state(name),name,str)
else
- logs.report("callbacks","%s: %s",state(name),name)
+ report_callbacks("%s: %s",state(name),name)
end
end
end
@@ -67,7 +123,7 @@ end
function callbacks.table()
local NC, NR, verbatim = context.NC, context.NR, context.type
context.starttabulate { "|l|l|p|" }
- for name, func in table.sortedhash(callback.list()) do
+ for name, _ in table.sortedhash(list) do
NC() verbatim(name) NC() verbatim(state(name)) NC() context(frozen[name] or "") NC() NR()
end
context.stoptabulate()
@@ -75,11 +131,9 @@ end
function callbacks.freeze(name,freeze)
freeze = type(freeze) == "string" and freeze
---~ print(name)
if find(name,"%*") then
local pattern = name -- string.simpleesc(name)
- local list = callback.list()
- for name, func in next, list do
+ for name, _ in next, list do
if find(name,pattern) then
frozen[name] = freeze or frozen[name] or "frozen"
end
@@ -98,6 +152,9 @@ function callbacks.register(name,func,freeze)
elseif freeze then
frozen[name] = (type(freeze) == "string" and freeze) or "registered"
end
+ if delayed[name] and environment.initex then
+ return nil
+ end
return register_callback(name,func)
end
@@ -138,6 +195,18 @@ function callbacks.pop(name)
end
end
+if trace_calls then
+ statistics.register("callback details", function()
+ local t = { } -- todo: pass function to register and quit at nil
+ for name, n in table.sortedhash(list) do
+ if n > 0 then
+ t[#t+1] = format("%s -> %s",name,n)
+ end
+ end
+ return t
+ end)
+end
+
--~ -- somehow crashes later on
--~
--~ callbacks.freeze("find_.*_file","finding file")
@@ -238,7 +307,7 @@ function garbagecollector.check(size,criterium)
local b = collectgarbage("count")
collectgarbage("collect")
local a = collectgarbage("count")
- logs.report("memory","forced sweep, collected: %s MB, used: %s MB",round((b-a)/1000),round(a/1000))
+ report_memory("forced sweep, collected: %s MB, used: %s MB",round((b-a)/1000),round(a/1000))
else
collectgarbage("collect")
end
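-- Illustrative sketch, not part of the committed code: the call-counting
-- wrapper used above when trace_calls is enabled, shown standalone with a
-- fake register function; all names here are illustrative.

local counts    = { }
local functions = { }

local function counting_register(register,name,func)
    counts[name]    = counts[name] or 0
    functions[name] = func
    local wrapper = function(...)
        counts[name] = counts[name] + 1
        return functions[name](...)
    end
    return register(name,wrapper)
end

local registry = { }
local function fake_register(name,func) registry[name] = func end

counting_register(fake_register,"buildpage_filter",function() return true end)
registry["buildpage_filter"]()
registry["buildpage_filter"]()
print(counts["buildpage_filter"])      -- 2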
diff --git a/tex/context/base/luat-cnf.lua b/tex/context/base/luat-cnf.lua
index e45aceb79..054de7c81 100644
--- a/tex/context/base/luat-cnf.lua
+++ b/tex/context/base/luat-cnf.lua
@@ -8,27 +8,37 @@ if not modules then modules = { } end modules ['luat-cnf'] = {
local format, concat, find = string.format, table.concat, string.find
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
+
luatex = luatex or { }
luatex.variablenames = {
- 'main_memory', 'extra_mem_bot', 'extra_mem_top',
- 'buf_size','expand_depth',
- 'font_max', 'font_mem_size',
- 'hash_extra', 'max_strings', 'pool_free', 'pool_size', 'string_vacancies',
- 'obj_tab_size', 'pdf_mem_size', 'dest_names_size',
- 'nest_size', 'param_size', 'save_size', 'stack_size','expand_depth',
- 'trie_size', 'hyph_size', 'max_in_open',
- 'ocp_stack_size', 'ocp_list_size', 'ocp_buf_size',
- 'max_print_line',
+ 'buf_size', -- 3000
+ 'dvi_buf_size', -- 16384
+ 'error_line', -- 79
+ 'expand_depth', -- 10000
+ 'half_error_line', -- 50
+ 'hash_extra', -- 0
+ 'nest_size', -- 50
+ 'max_in_open', -- 15
+ 'max_print_line', -- 79
+ 'max_strings', -- 15000
+ 'ocp_stack_size', -- 1000
+ 'ocp_list_size', -- 1000
+ 'ocp_buf_size', -- 1000
+ 'param_size', -- 60
+ 'pk_dpi', -- 72
+ 'save_size', -- 4000
+ 'stack_size', -- 300
+ 'strings_free', -- 100
}
function luatex.variables()
- local t, x = { }, nil
+ local t = { }
for _,v in next, luatex.variablenames do
- x = resolvers.var_value(v)
- if x and find(x,"^%d+$") then
- t[v] = tonumber(x)
- end
+ local x = resolvers.var_value(v)
+ t[v] = tonumber(x) or x
end
return t
end
@@ -48,7 +58,7 @@ luatex = luatex or { }
-- we provide our own file handling
-texconfig.kpse_init = false
+texconfig.kpse_init = false
texconfig.shell_escape = 't'
-- as soon as possible
@@ -61,12 +71,13 @@ function texconfig.init()
-- shortcut and helper
- local b = lua.bytecode
-
local function init(start)
+ local b = lua.bytecode
local i = start
while b[i] do
- b[i]() ; b[i] = nil ; i = i + 1
+ b[i]() ;
+ b[i] = nil ;
+ i = i + 1
-- collectgarbage('step')
end
return i - start
@@ -89,27 +100,25 @@ end)
-- done, from now on input and callbacks are internal
]]
-function luatex.dumpstate(name,firsttable)
- if tex and tex.luatexversion < 38 then
- os.remove(name)
- elseif true then
- local t = {
- "-- this file is generated, don't change it\n",
- "-- configuration (can be overloaded later)\n"
- }
- for _,v in next, luatex.variablenames do
- local tv = texconfig[v]
- if tv then
- t[#t+1] = format("texconfig.%s=%s",v,tv)
- end
+local function makestub()
+ name = name or (environment.jobname .. ".lui")
+ firsttable = firsttable or lua.firstbytecode
+ local t = {
+ "-- this file is generated, don't change it\n",
+ "-- configuration (can be overloaded later)\n"
+ }
+ for _,v in next, luatex.variablenames do
+ local tv = texconfig[v]
+ if tv and tv ~= "" then
+ t[#t+1] = format("texconfig.%s=%s",v,tv)
end
- io.savedata(name,format("%s\n\n%s",concat(t,"\n"),format(stub,firsttable or 501)))
- else
- io.savedata(name,format(stub,firsttable or 501))
end
+ io.savedata(name,format("%s\n\n%s",concat(t,"\n"),format(stub,firsttable)))
end
-texconfig.kpse_init = false
-texconfig.max_print_line = 100000
-texconfig.max_in_open = 127
-texconfig.shell_escape = 't'
+lua.registerfinalizer(makestub)
+
+-- to be moved here:
+--
+-- statistics.report_storage("log")
+-- statistics.save_fmt_status("\jobname","\contextversion","context.tex")
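-- Illustrative sketch, not part of the committed code: roughly what makestub
-- assembles before io.savedata writes the *.lui stub; the variable values
-- below are made up.

local format, concat = string.format, table.concat

local texconfig     = { hash_extra = 100000, max_strings = 500000, save_size = 50000 }
local variablenames = { "hash_extra", "max_strings", "save_size" }

local t = {
    "-- this file is generated, don't change it\n",
    "-- configuration (can be overloaded later)\n",
}
for _,v in ipairs(variablenames) do
    local tv = texconfig[v]
    if tv and tv ~= "" then
        t[#t+1] = format("texconfig.%s=%s",v,tv)
    end
end
print(concat(t,"\n"))                  -- the real stub appends the bytecode loader chunk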
diff --git a/tex/context/base/luat-cod.lua b/tex/context/base/luat-cod.lua
new file mode 100644
index 000000000..4a1a3d6f0
--- /dev/null
+++ b/tex/context/base/luat-cod.lua
@@ -0,0 +1,141 @@
+if not modules then modules = { } end modules ['luat-cod'] = {
+ version = 1.001,
+ comment = "companion to luat-cod.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local match, gsub, find = string.match, string.gsub, string.find
+
+-- some basic housekeeping
+
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
+texconfig.max_print_line = 100000
+texconfig.max_in_open = 127
+
+-- registering bytecode chunks
+
+lua.bytecode = lua.bytecode or { } -- built in anyway
+lua.bytedata = lua.bytedata or { }
+lua.bytedone = lua.bytedone or { }
+
+local bytecode, bytedata, bytedone = lua.bytecode, lua.bytedata, lua.bytedone
+
+lua.firstbytecode = 501
+lua.lastbytecode = lua.lastbytecode or (lua.firstbytecode - 1) -- as we load ourselves again ... maybe return earlier
+
+function lua.registeredcodes()
+ return lua.lastbytecode - lua.firstbytecode + 1
+end
+
+function lua.registercode(filename,version)
+ local barename = gsub(filename,"%.[%a%d]+$","")
+ if barename == filename then filename = filename .. ".lua" end
+ local basename = match(barename,"^.+[/\\](.-)$") or barename
+ if not bytedone[barename] then
+ local code = environment.luafilechunk(filename)
+ if code then
+ assert(code)()
+ bytedone[barename] = true
+ if environment.initex then
+ local n = lua.lastbytecode + 1
+ bytedata[n] = { barename, version }
+ bytecode[n] = code
+ lua.lastbytecode = n
+ end
+ end
+ end
+end
+
+local finalizers = { }
+
+function lua.registerfinalizer(f)
+ if type(f) == "function"then
+ finalizers[#finalizers+1] = f
+ end
+end
+
+function lua.finalize()
+ for i=1,#finalizers do
+ finalizers[i]()
+ end
+end
+
+-- A first start with environments. This will be overloaded later.
+
+local sourcefile = arg and arg[1] or ""
+local sourcepath = find(sourcefile,"/") and gsub(sourcefile,"/[^/]+$","") or ""
+local targetpath = "."
+
+environment = environment or { }
+
+-- delayed (via metatable):
+--
+-- environment.jobname = tex.jobname
+-- environment.version = tostring(tex.toks.contextversiontoks)
+
+environment.initex = tex.formatname == ""
+
+if not environment.luafilechunk then
+
+ function environment.luafilechunk(filename)
+ if sourcepath ~= "" then
+ filename = sourcepath .. "/" .. filename
+ end
+ local data = loadfile(filename)
+ texio.write("<",data and "+ " or "- ",filename,">")
+ return data
+ end
+
+end
+
+-- We need a few premature callbacks in the format generator. We
+-- also do this when the format is loaded as otherwise we get
+-- a kpse error when disabled. This is an engine issue that will
+-- be sorted out in due time.
+
+local function source_file(name)
+ local fullname = sourcepath .. "/" .. name
+ if lfs.isfile(fullname) then
+ return fullname
+ end
+ fullname = fullname .. ".tex"
+ if lfs.isfile(fullname) then
+ return fullname
+ end
+ if lfs.isfile(name) then
+ return name
+ end
+ name = name .. ".tex"
+ if lfs.isfile(name) then
+ return name
+ end
+ return nil
+end
+
+local function target_file(name)
+ return targetpath .. "/" .. name
+end
+
+local function find_read_file (id,name)
+ return source_file(name)
+end
+
+local function find_write_file(id,name)
+ return target_file(name)
+end
+
+local function open_read_file(name)
+ local f = io.open(name,'rb')
+ return {
+ reader = function()
+ return f:read("*line")
+ end
+ }
+end
+
+callback.register('find_read_file' , find_read_file )
+callback.register('open_read_file' , open_read_file)
+callback.register('find_write_file', find_write_file)
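-- Illustrative sketch, not part of the committed code: how the registration
-- above is driven. At the TeX end \registerctxluafile{luat-cod}{1.001}
-- expands to the call below; during format generation (environment.initex)
-- the chunk is also stored in lua.bytecode so it ends up in the format.

lua.registercode("luat-cod","1.001")

-- the bookkeeping can then be inspected:

print(lua.registeredcodes())           -- number of chunks registered so far
print(lua.firstbytecode,lua.lastbytecode)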
diff --git a/tex/context/base/luat-cod.mkiv b/tex/context/base/luat-cod.mkiv
index d3b37d0e1..94d245a3f 100644
--- a/tex/context/base/luat-cod.mkiv
+++ b/tex/context/base/luat-cod.mkiv
@@ -13,6 +13,12 @@
% \writestatus{loading}{ConTeXt Lua Macros / Code}
+\long\def\lastexpanded{} % todo: elsewhere we use \@@expanded
+
+\long\def\expanded#1{\long\xdef\lastexpanded{\noexpand#1}\lastexpanded}
+
+\newif\ifproductionrun
+
%D Originally we compiled the lua files externally and loaded
%D them at runtime, but when the amount grew, we realized that
%D we needed a way to store them in the format, which is what
@@ -31,6 +37,10 @@
%D scripts need to share data anyway. So eventually \LUATEX\ got only
%D one instance. Because each call is reentrant there is not much
%D danger for crashes.
+%D
+%D Most code here has changed after version 0.60 as part of adaptation to
+%D new functionality. We no longer support the hooks for initializing
+%D code as this can be done at the \LUA\ end.
\def\ctxdirectlua{\directlua\zerocount}
\def\ctxlatelua {\latelua \zerocount}
@@ -46,116 +56,11 @@
\edef\luaversion{\ctxlua{tex.print(_VERSION)}}
-%D We want to define \LUA\ related things in the format but
-%D need to reload code because \LUA\ instances themselves are
-%D not dumped into the format.
-
-\newtoks\everyloadluacode
-\newtoks\everyfinalizeluacode
-
-\normaleveryjob{\the\everyloadluacode\the\everyfinalizeluacode\the\everyjob}
-
-\newif\ifproductionrun
-
-%D Here we operate in the \TEX\ catcode regime as we haven't yet defined
-%D catcode regimes. A chicken or egg problem.
-
-\normalprotected\long\def\startruntimeluacode#1\stopruntimeluacode % only simple code (load +init)
- {\ifproductionrun
- \global\let\startruntimeluacode\relax
- \global\let\stopruntimeluacode \relax
- \else
- \global\everyloadluacode\expandafter{\the\everyloadluacode#1}%
- \fi
- #1} % maybe no interference
-
-\normalprotected\long\def\startruntimectxluacode#1\stopruntimectxluacode
- {\startruntimeluacode\ctxlua{#1}\stopruntimeluacode}
-
-%D Next we load the initialization code.
-
-\startruntimectxluacode
- environment = environment or { }
- environment.jobname = "\jobname" % tex.jobname
- environment.initex = \ifproductionrun false \else true \fi % tex.formatname == ""
- environment.version = "\fmtversion"
-\stopruntimectxluacode
-
-% we start at 500, below this, we store predefined data (dumps)
-
-\newcount\luabytecodecounter \luabytecodecounter=500
-
-\startruntimectxluacode
- lua.bytedata = lua.bytedata or { }
-\stopruntimectxluacode
-
-%D Handy when we expand:
-
-\let\stopruntimeluacode \relax
-\let\stopruntimectxluacode\relax
-
-\long\def\lastexpanded{} % todo: elsewhere we use \@@expanded
-
-\long\def\expanded#1{\long\xdef\lastexpanded{\noexpand#1}\lastexpanded}
-
-%D More code:
-
-% \def\ctxluabytecode#1% executes an already loaded chunk
-% {\ctxlua {
-% local str = ''
-% if lua.bytedata[#1] then
-% str = " from file " .. lua.bytedata[#1][1] .. " version " .. lua.bytedata[#1][2]
-% end
-% if lua.bytecode[#1] then
-% if environment.initex then
-% texio.write_nl("bytecode: executing blob " .. "#1" .. str)
-% assert(lua.bytecode[#1])()
-% else
-% texio.write_nl("bytecode: initializing blob " .. "#1" .. str)
-% assert(lua.bytecode[#1])()
-% lua.bytecode[#1] = nil
-% end
-% else
-% texio.write_nl("bytecode: invalid blob " .. "#1" .. str)
-% end
-% }}
-
-\def\ctxluabytecode#1% executes an already loaded chunk
- {\ctxlua {
- local lbc = lua.bytecode
- if lbc[#1] then
- assert(lbc[#1])()
- if not environment.initex then
- lbc[#1] = nil
- end
- end
- }}
-
-\def\ctxluabyteload#1#2% registers and compiles chunk
- {\global\advance\luabytecodecounter \plusone
- \normalexpanded{\startruntimectxluacode
- lua.bytedata[\the\luabytecodecounter] = { "#1", "#2" }
- \stopruntimectxluacode}%
- \ctxlua {
- lua.bytedata[\the\luabytecodecounter] = { "#1", "#2" }
- lua.bytecode[\the\luabytecodecounter] = environment.luafilechunk("#1")
- }}
-
-\def\ctxloadluafile#1#2% load a (either not compiled) chunk at runtime
- {\doifelsenothing{#2}
- {\ctxlua{environment.loadluafile("#1")}}
- {\ctxlua{environment.loadluafile("#1",#2)}}}
+\def\registerctxluafile#1#2{\ctxlua{lua.registercode("#1","#2")}}
+\def\ctxloadluafile #1{\ctxlua{lua.registercode("#1")}}
-\def\registerctxluafile#1#2% name version (modules and core code)
- {\ifproductionrun
- \ctxloadluafile{#1}{#2}%
- \else
- \ctxluabyteload{#1}{#2}% can go away
- \fi
- \global\everyloadluacode\expandafter\expandafter\expandafter{\expandafter\the\expandafter\everyloadluacode
- \expandafter\ctxluabytecode\expandafter{\the\luabytecodecounter}}%
- \ctxluabytecode{\the\luabytecodecounter}}
+\registerctxluafile{luat-cod}{1.001}
-\everydump\expandafter{\the\everydump\ctxlua{luatex.dumpstate(environment.jobname..".lui",501)}}
+\everydump\expandafter{\the\everydump\ctxlua{lua.finalize()}}
\endinput
diff --git a/tex/context/base/luat-dum.lua b/tex/context/base/luat-dum.lua
index 4530c2ef3..dd9f7460e 100644
--- a/tex/context/base/luat-dum.lua
+++ b/tex/context/base/luat-dum.lua
@@ -1,5 +1,5 @@
if not modules then modules = { } end modules ['luat-dum'] = {
- version = 1.001,
+ version = 1.100,
comment = "companion to luatex-*.tex",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
@@ -28,15 +28,16 @@ experiments = {
enable = dummyfunction,
disable = dummyfunction,
}
-storage = {
+storage = { -- probably no longer needed
register = dummyfunction,
shared = { },
}
logs = {
+ new = function() return dummyfunction end,
report = dummyfunction,
simple = dummyfunction,
}
-tasks = {
+tasks = { -- no longer needed
new = dummyfunction,
actions = dummyfunction,
appendaction = dummyfunction,
@@ -80,27 +81,80 @@ end
-- usage as I don't want any dependency at all. Also, ConTeXt might have
-- different needs and tricks added.
+--~ containers.usecache = true
+
caches = { }
---~ containers.usecache = true
+local writable, readables = nil, { }
+
+if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then
+ caches.namespace = 'generic'
+end
+
+do
-function caches.setpath(category,subcategory)
- local root = kpse.var_value("TEXMFCACHE") or ""
- if root == "" then
- root = kpse.var_value("VARTEXMF") or ""
+ local cachepaths = kpse.expand_path('$TEXMFCACHE') or ""
+
+ if cachepaths == "" then
+ cachepaths = kpse.expand_path('$VARTEXMF')
+ end
+
+ if cachepaths == "" then
+ cachepaths = "."
end
- if root ~= "" then
- root = file.join(root,category)
- lfs.mkdir(root)
- root = file.join(root,subcategory)
- lfs.mkdir(root)
- return lfs.isdir(root) and root
+
+ cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":")
+
+ for i=1,#cachepaths do
+ if file.iswritable(cachepaths[i]) then
+ writable = file.join(cachepaths[i],"luatex-cache")
+ lfs.mkdir(writable)
+ writable = file.join(writable,caches.namespace)
+ lfs.mkdir(writable)
+ break
+ end
end
+
+ for i=1,#cachepaths do
+ if file.isreadable(cachepaths[i]) then
+ readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace)
+ end
+ end
+
+ if not writable then
+        texio.write_nl("quitting: fix your writable cache path")
+ os.exit()
+ elseif #readables == 0 then
+        texio.write_nl("quitting: fix your readable cache path")
+ os.exit()
+ elseif #readables == 1 and readables[1] == writable then
+ texio.write(string.format("(using cache: %s)",writable))
+ else
+ texio.write(string.format("(using write cache: %s)",writable))
+ texio.write(string.format("(using read cache: %s)",table.concat(readables, " ")))
+ end
+
+end
+
+function caches.getwritablepath(category,subcategory)
+ local path = file.join(writable,category)
+ lfs.mkdir(path)
+ path = file.join(path,subcategory)
+ lfs.mkdir(path)
+ return path
+end
+
+function caches.getreadablepaths(category,subcategory)
+ local t = { }
+ for i=1,#readables do
+ t[i] = file.join(readables[i],category,subcategory)
+ end
+ return t
end
local function makefullname(path,name)
if path and path ~= "" then
- name = "temp-" and name -- clash prevention
+ name = "temp-" .. name -- clash prevention
return file.addsuffix(file.join(path,name),"lua")
end
end
@@ -110,17 +164,21 @@ function caches.iswritable(path,name)
return fullname and file.iswritable(fullname)
end
-function caches.loaddata(path,name)
- local fullname = makefullname(path,name)
- if fullname then
- local data = loadfile(fullname)
- return data and data()
+function caches.loaddata(paths,name)
+ for i=1,#paths do
+ local fullname = makefullname(paths[i],name)
+ if fullname then
+ texio.write(string.format("(load: %s)",fullname))
+ local data = loadfile(fullname)
+ return data and data()
+ end
end
end
function caches.savedata(path,name,data)
local fullname = makefullname(path,name)
if fullname then
+ texio.write(string.format("(save: %s)",fullname))
table.tofile(fullname,data,'return',false,true,false)
end
end
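-- Illustrative sketch, not part of the committed code: how the generic cache
-- interface defined above is used; the category and subcategory names are
-- made up for the example.

local path  = caches.getwritablepath("fonts","otf")    -- created on demand
local paths = caches.getreadablepaths("fonts","otf")   -- search list for loading

caches.savedata(path,"somefont",{ version = 1.001 })   -- writes temp-somefont.lua
local data = caches.loaddata(paths,"somefont")         -- first readable hit wins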
diff --git a/tex/context/base/luat-env.lua b/tex/context/base/luat-env.lua
index 0e21fca31..9463eff55 100644
--- a/tex/context/base/luat-env.lua
+++ b/tex/context/base/luat-env.lua
@@ -14,6 +14,8 @@ if not modules then modules = { } end modules ['luat-env'] = {
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_resolvers = logs.new("resolvers")
+
local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
local unquote, quote = string.unquote, string.quote
@@ -28,11 +30,13 @@ end
-- dirty tricks
if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
-end
-
-if profiler and os.env["MTX_PROFILE_RUN"] == "YES" then
- profiler.start("luatex-profile.log")
+ arg[-1] = arg[0]
+ arg[ 0] = arg[2]
+ for k=3,#arg do
+ arg[k-2] = arg[k]
+ end
+ arg[#arg] = nil -- last
+ arg[#arg] = nil -- pre-last
end
-- environment
@@ -42,9 +46,33 @@ environment.arguments = { }
environment.files = { }
environment.sortedflags = nil
-if not environment.jobname or environment.jobname == "" then if tex then environment.jobname = tex.jobname end end
-if not environment.version or environment.version == "" then environment.version = "unknown" end
-if not environment.jobname then environment.jobname = "unknown" end
+local mt = {
+ __index = function(_,k)
+ if k == "version" then
+ local version = tex.toks and tex.toks.contextversiontoks
+ if version and version ~= "" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k == "jobname" or k == "formatname" then
+ local name = tex and tex[k]
+ if name or name== "" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k == "outputfilename" then
+ local name = environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+
+setmetatable(environment,mt)
function environment.initialize_arguments(arg)
local arguments, files = { }, { }
@@ -100,8 +128,6 @@ function environment.argument(name,partial)
return nil
end
-environment.argument("x",true)
-
function environment.split_arguments(separator) -- rather special, cut-off before separator
local done, before, after = false, { }, { }
local original_arguments = environment.original_arguments
@@ -197,28 +223,20 @@ end
environment.loadedluacode = loadfile -- can be overloaded
---~ function environment.loadedluacode(name)
---~ if os.spawn("texluac -s -o texluac.luc " .. name) == 0 then
---~ local chunk = loadstring(io.loaddata("texluac.luc"))
---~ os.remove("texluac.luc")
---~ return chunk
---~ else
---~ environment.loadedluacode = loadfile -- can be overloaded
---~ return loadfile(name)
---~ end
---~ end
-
-function environment.luafilechunk(filename) -- used for loading lua bytecode in the format
+function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
filename = file.replacesuffix(filename, "lua")
local fullname = environment.luafile(filename)
if fullname and fullname ~= "" then
+ local data = environment.loadedluacode(fullname)
if trace_locating then
- logs.report("fileio","loading file %s", fullname)
+ report_resolvers("loading file %s%s", fullname, not data and " failed" or "")
+ elseif not silent then
+ texio.write("<",data and "+ " or "- ",fullname,">")
end
- return environment.loadedluacode(fullname)
+ return data
else
if trace_locating then
- logs.report("fileio","unknown file %s", filename)
+ report_resolvers("unknown file %s", filename)
end
return nil
end
@@ -238,7 +256,7 @@ function environment.loadluafile(filename, version)
local fullname = (lucname and environment.luafile(lucname)) or ""
if fullname ~= "" then
if trace_locating then
- logs.report("fileio","loading %s", fullname)
+ report_resolvers("loading %s", fullname)
end
chunk = loadfile(fullname) -- this way we don't need a file exists check
end
@@ -256,7 +274,7 @@ function environment.loadluafile(filename, version)
return true
else
if trace_locating then
- logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version)
+ report_resolvers("version mismatch for %s: lua=%s, luc=%s", filename, v, version)
end
environment.loadluafile(filename)
end
@@ -267,12 +285,12 @@ function environment.loadluafile(filename, version)
fullname = (luaname and environment.luafile(luaname)) or ""
if fullname ~= "" then
if trace_locating then
- logs.report("fileio","loading %s", fullname)
+ report_resolvers("loading %s", fullname)
end
chunk = loadfile(fullname) -- this way we don't need a file exists check
if not chunk then
if trace_locating then
- logs.report("fileio","unknown file %s", filename)
+ report_resolvers("unknown file %s", filename)
end
else
assert(chunk)()
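-- Illustrative sketch, not part of the committed code: the lazy __index
-- pattern used above for environment.version and friends, shown standalone.
-- The first access computes the value and caches it with rawset, so the
-- metamethod fires only once per key.

local settings = { }

setmetatable(settings, {
    __index = function(t,k)
        if k == "jobname" then
            local name = "unknown"     -- in the real code: tex.jobname
            rawset(t,k,name)
            return name
        end
    end
})

print(settings.jobname)                -- computed and cached
print(settings.jobname)                -- now served from the table itself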
diff --git a/tex/context/base/luat-exe.lua b/tex/context/base/luat-exe.lua
index ca3b75162..9aa5c78d0 100644
--- a/tex/context/base/luat-exe.lua
+++ b/tex/context/base/luat-exe.lua
@@ -9,6 +9,8 @@ if not modules then modules = { } end modules ['luat-exe'] = {
local match, find = string.match, string.find
local concat = table.concat
+local report_executer = logs.new("executer")
+
if not executer then executer = { } end
executer.permitted = { }
@@ -26,18 +28,20 @@ end
function executer.finalize() -- todo: os.exec, todo: report ipv print
local execute = os.execute
function executer.execute(...)
- local t, name, arguments = {...}, "", ""
+ -- todo: make more clever first split
+ local t, name, arguments = { ... }, "", ""
+ local one = t[1]
if #t == 1 then
- if type(t[1]) == 'table' then
- name, arguments = t[1], concat(t," ",2,#t)
+ if type(one) == 'table' then
+ name, arguments = one, concat(t," ",2,#t)
else
- name, arguments = match(t[1],"^(.-)%s+(.+)$")
+ name, arguments = match(one,"^(.-)%s+(.+)$")
if not (name and arguments) then
- name, arguments = t[1], ""
+ name, arguments = one, ""
end
end
else
- name, arguments = t[1], concat(t," ",2,#t)
+ name, arguments = one, concat(t," ",2,#t)
end
local permitted = executer.permitted
for k=1,#permitted do
@@ -46,16 +50,14 @@ function executer.finalize() -- todo: os.exec, todo: report ipv print
execute(name .. " " .. arguments)
-- print("executed: " .. name .. " " .. arguments)
else
- print("not permitted: " .. name .. " " .. arguments)
+ report_executer("not permitted: %s %s",name,arguments)
end
end
end
function executer.finalize()
- print("executer is already finalized")
- end
- function executer.register(name)
- print("executer is already finalized")
+ report_executer("already finalized")
end
+ executer.register = executer.finalize
os.execute = executer.execute
end
@@ -69,3 +71,20 @@ end
--~ executer.execute("dir *.tex")
--~ executer.execute("ls *.tex")
--~ os.execute('ls')
+
+function executer.check()
+ local mode = resolvers.variable("command_mode")
+ local list = resolvers.variable("command_list")
+ if mode == "none" then
+ executer.finalize()
+ elseif mode == "list" and list ~= "" then
+ for s in string.gmatch("[^%s,]",list) do
+ executer.register(s)
+ end
+ executer.finalize()
+ else
+ -- all
+ end
+end
+
+--~ executer.check()
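-- Illustrative sketch, not part of the committed code: intended use of the
-- executer whitelist above; executer.register is assumed to add a name (or
-- pattern) to executer.permitted. After finalize(), os.execute only runs
-- matching commands and reports everything else as not permitted.

executer.register("bibtex")            -- allow this external program
executer.finalize()                    -- freeze the list and overload os.execute

os.execute("bibtex somefile")          -- permitted, runs
os.execute("someprogram --flag")       -- refused, reported via report_executer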
diff --git a/tex/context/base/luat-fio.lua b/tex/context/base/luat-fio.lua
index 0d1bd1808..090850331 100644
--- a/tex/context/base/luat-fio.lua
+++ b/tex/context/base/luat-fio.lua
@@ -11,13 +11,10 @@ local texiowrite = (texio and texio.write) or print
local format = string.format
-texconfig.kpse_init = false
-texconfig.trace_file_names = true -- also influences pdf fonts reporting .. todo
-texconfig.max_print_line = 100000
-
-kpse = { } setmetatable(kpse, { __index = function(k,v) return input[v] end } )
-
--- if still present, we overload kpse (put it off-line so to say)
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
+texconfig.max_in_open = 127
+texconfig.max_print_line = 100000
if not resolvers.instance then
@@ -27,52 +24,54 @@ if not resolvers.instance then
resolvers.instance.engine = 'luatex'
resolvers.instance.validfile = resolvers.validctxfile
+--~ trackers.enable("resolvers.*")
resolvers.load()
+--~ trackers.disable("resolvers.*")
if callback then
- callback.register('find_read_file' , function(id,name) return resolvers.findtexfile(name) end)
- callback.register('open_read_file' , function( name) return resolvers.opentexfile(name) end)
-
- callback.register('find_data_file' , function(name) return resolvers.findbinfile(name,"tex") end)
- callback.register('find_enc_file' , function(name) return resolvers.findbinfile(name,"enc") end)
- callback.register('find_font_file' , function(name) return resolvers.findbinfile(name,"tfm") end)
- callback.register('find_format_file' , function(name) return resolvers.findbinfile(name,"fmt") end)
- callback.register('find_image_file' , function(name) return resolvers.findbinfile(name,"tex") end)
- callback.register('find_map_file' , function(name) return resolvers.findbinfile(name,"map") end)
- callback.register('find_ocp_file' , function(name) return resolvers.findbinfile(name,"ocp") end)
- callback.register('find_opentype_file' , function(name) return resolvers.findbinfile(name,"otf") end)
- callback.register('find_output_file' , function(name) return name end)
- callback.register('find_pk_file' , function(name) return resolvers.findbinfile(name,"pk") end)
- callback.register('find_sfd_file' , function(name) return resolvers.findbinfile(name,"sfd") end)
- callback.register('find_truetype_file' , function(name) return resolvers.findbinfile(name,"ttf") end)
- callback.register('find_type1_file' , function(name) return resolvers.findbinfile(name,"pfb") end)
- callback.register('find_vf_file' , function(name) return resolvers.findbinfile(name,"vf") end)
-
- callback.register('read_data_file' , function(file) return resolvers.loadbinfile(file,"tex") end)
- callback.register('read_enc_file' , function(file) return resolvers.loadbinfile(file,"enc") end)
- callback.register('read_font_file' , function(file) return resolvers.loadbinfile(file,"tfm") end)
+ callbacks.register('find_read_file' , function(id,name) return resolvers.findtexfile(name) end, true)
+ callbacks.register('open_read_file' , function( name) return resolvers.opentexfile(name) end, true)
+
+ callbacks.register('find_data_file' , function(name) return resolvers.findbinfile(name,"tex") end, true)
+ callbacks.register('find_enc_file' , function(name) return resolvers.findbinfile(name,"enc") end, true)
+ callbacks.register('find_font_file' , function(name) return resolvers.findbinfile(name,"tfm") end, true)
+ callbacks.register('find_format_file' , function(name) return resolvers.findbinfile(name,"fmt") end, true)
+ callbacks.register('find_image_file' , function(name) return resolvers.findbinfile(name,"tex") end, true)
+ callbacks.register('find_map_file' , function(name) return resolvers.findbinfile(name,"map") end, true)
+ callbacks.register('find_ocp_file' , function(name) return resolvers.findbinfile(name,"ocp") end, true)
+ callbacks.register('find_opentype_file' , function(name) return resolvers.findbinfile(name,"otf") end, true)
+ callbacks.register('find_output_file' , function(name) return name end, true)
+ callbacks.register('find_pk_file' , function(name) return resolvers.findbinfile(name,"pk") end, true)
+ callbacks.register('find_sfd_file' , function(name) return resolvers.findbinfile(name,"sfd") end, true)
+ callbacks.register('find_truetype_file' , function(name) return resolvers.findbinfile(name,"ttf") end, true)
+ callbacks.register('find_type1_file' , function(name) return resolvers.findbinfile(name,"pfb") end, true)
+ callbacks.register('find_vf_file' , function(name) return resolvers.findbinfile(name,"vf") end, true)
+
+ callbacks.register('read_data_file' , function(file) return resolvers.loadbinfile(file,"tex") end, true)
+ callbacks.register('read_enc_file' , function(file) return resolvers.loadbinfile(file,"enc") end, true)
+ callbacks.register('read_font_file' , function(file) return resolvers.loadbinfile(file,"tfm") end, true)
-- format
-- image
- callback.register('read_map_file' , function(file) return resolvers.loadbinfile(file,"map") end)
- callback.register('read_ocp_file' , function(file) return resolvers.loadbinfile(file,"ocp") end)
+ callbacks.register('read_map_file' , function(file) return resolvers.loadbinfile(file,"map") end, true)
+ callbacks.register('read_ocp_file' , function(file) return resolvers.loadbinfile(file,"ocp") end, true)
-- output
- callback.register('read_pk_file' , function(file) return resolvers.loadbinfile(file,"pk") end) -- 600dpi/manfnt.720pk
- callback.register('read_sfd_file' , function(file) return resolvers.loadbinfile(file,"sfd") end)
- callback.register('read_vf_file' , function(file) return resolvers.loadbinfile(file,"vf" ) end)
+ callbacks.register('read_pk_file' , function(file) return resolvers.loadbinfile(file,"pk") end, true) -- 600dpi/manfnt.720pk
+ callbacks.register('read_sfd_file' , function(file) return resolvers.loadbinfile(file,"sfd") end, true)
+ callbacks.register('read_vf_file' , function(file) return resolvers.loadbinfile(file,"vf" ) end, true)
- callback.register('find_font_file' , function(name) return resolvers.findbinfile(name,"ofm") end)
- callback.register('find_vf_file' , function(name) return resolvers.findbinfile(name,"ovf") end)
+ callbacks.register('find_font_file' , function(name) return resolvers.findbinfile(name,"ofm") end, true)
+ callbacks.register('find_vf_file' , function(name) return resolvers.findbinfile(name,"ovf") end, true)
- callback.register('read_font_file' , function(file) return resolvers.loadbinfile(file,"ofm") end)
- callback.register('read_vf_file' , function(file) return resolvers.loadbinfile(file,"ovf") end)
+ callbacks.register('read_font_file' , function(file) return resolvers.loadbinfile(file,"ofm") end, true)
+ callbacks.register('read_vf_file' , function(file) return resolvers.loadbinfile(file,"ovf") end, true)
- -- callback.register('read_opentype_file' , function(file) return resolvers.loadbinfile(file,"otf") end)
- -- callback.register('read_truetype_file' , function(file) return resolvers.loadbinfile(file,"ttf") end)
- -- callback.register('read_type1_file' , function(file) return resolvers.loadbinfile(file,"pfb") end)
+ -- callbacks.register('read_opentype_file' , function(file) return resolvers.loadbinfile(file,"otf") end, true)
+ -- callbacks.register('read_truetype_file' , function(file) return resolvers.loadbinfile(file,"ttf") end, true)
+ -- callbacks.register('read_type1_file' , function(file) return resolvers.loadbinfile(file,"pfb") end, true)
- callback.register('find_write_file' , function(id,name) return name end)
- callback.register('find_format_file' , function(name) return name end)
+ callbacks.register('find_write_file' , function(id,name) return name end, true)
+ callbacks.register('find_format_file' , function(name) return name end, true)
end
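-- Illustrative sketch, not part of the committed code: what the extra "true"
-- argument buys. callbacks.register (see luat-cbk.lua above) records a freeze
-- state, so a later registration attempt for the same callback is refused and
-- reported instead of silently replacing the resolver-driven function.

callbacks.register('find_data_file',
    function(name) return resolvers.findbinfile(name,"tex") end,
    true)                              -- freeze with the default reason

-- a second callbacks.register('find_data_file',...) now runs into the
-- frozen check instead of overwriting the function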
diff --git a/tex/context/base/luat-fmt.lua b/tex/context/base/luat-fmt.lua
new file mode 100644
index 000000000..d9c0e38c8
--- /dev/null
+++ b/tex/context/base/luat-fmt.lua
@@ -0,0 +1,117 @@
+if not modules then modules = { } end modules ['luat-fmt'] = {
+ version = 1.001,
+ comment = "companion to mtxrun",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- helper for mtxrun
+
+function environment.make_format(name)
+ -- change to format path (early as we need expanded paths)
+ local olddir = lfs.currentdir()
+ local path = caches.getwritablepath("formats") or "" -- maybe platform
+ if path ~= "" then
+ lfs.chdir(path)
+ end
+ logs.simple("format path: %s",lfs.currentdir())
+ -- check source file
+ local texsourcename = file.addsuffix(name,"tex")
+ local fulltexsourcename = resolvers.find_file(texsourcename,"tex") or ""
+ if fulltexsourcename == "" then
+ logs.simple("no tex source file with name: %s",texsourcename)
+ lfs.chdir(olddir)
+ return
+ else
+ logs.simple("using tex source file: %s",fulltexsourcename)
+ end
+ local texsourcepath = dir.expand_name(file.dirname(fulltexsourcename)) -- really needed
+ -- check specification
+ local specificationname = file.replacesuffix(fulltexsourcename,"lus")
+ local fullspecificationname = resolvers.find_file(specificationname,"tex") or ""
+ if fullspecificationname == "" then
+ specificationname = file.join(texsourcepath,"context.lus")
+ fullspecificationname = resolvers.find_file(specificationname,"tex") or ""
+ end
+ if fullspecificationname == "" then
+ logs.simple("unknown stub specification: %s",specificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local specificationpath = file.dirname(fullspecificationname)
+ -- load specification
+ local usedluastub = nil
+ local usedlualibs = dofile(fullspecificationname)
+ if type(usedlualibs) == "string" then
+ usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs)
+ elseif type(usedlualibs) == "table" then
+ logs.simple("using stub specification: %s",fullspecificationname)
+ local texbasename = file.basename(name)
+ local luastubname = file.addsuffix(texbasename,"lua")
+ local lucstubname = file.addsuffix(texbasename,"luc")
+ -- pack libraries in stub
+ logs.simple("creating initialization file: %s",luastubname)
+ utils.merger.selfcreate(usedlualibs,specificationpath,luastubname)
+ -- compile stub file (does not save that much as we don't use this stub at startup any more)
+ local strip = resolvers.boolean_variable("LUACSTRIP", true)
+ if utils.lua.compile(luastubname,lucstubname,false,strip) and lfs.isfile(lucstubname) then
+ logs.simple("using compiled initialization file: %s",lucstubname)
+ usedluastub = lucstubname
+ else
+ logs.simple("using uncompiled initialization file: %s",luastubname)
+ usedluastub = luastubname
+ end
+ else
+ logs.simple("invalid stub specification: %s",fullspecificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ -- generate format
+ local q = string.quote
+ local command = string.format("luatex --ini --lua=%s %s %sdump",q(usedluastub),q(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
+ logs.simple("running command: %s\n",command)
+ os.spawn(command)
+ -- remove related mem files
+ local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
+ -- logs.simple("removing related mplib format with pattern '%s'", pattern)
+ local mp = dir.glob(pattern)
+ if mp then
+ for i=1,#mp do
+ local name = mp[i]
+ logs.simple("removing related mplib format %s", file.basename(name))
+ os.remove(name)
+ end
+ end
+ lfs.chdir(olddir)
+end
+
+function environment.run_format(name,data,more)
+ -- hm, rather old code here; we can now use the file.whatever functions
+ if name and name ~= "" then
+ local barename = file.removesuffix(name)
+ local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
+ if fmtname == "" then
+ fmtname = resolvers.find_file(file.addsuffix(barename,"fmt")) or ""
+ end
+ fmtname = resolvers.clean_path(fmtname)
+ if fmtname == "" then
+ logs.simple("no format with name: %s",name)
+ else
+ local barename = file.removesuffix(name) -- expanded name
+ local luaname = file.addsuffix(barename,"luc")
+ if not lfs.isfile(luaname) then
+ luaname = file.addsuffix(barename,"lua")
+ end
+ if not lfs.isfile(luaname) then
+ logs.simple("using format name: %s",fmtname)
+ logs.simple("no luc/lua with name: %s",barename)
+ else
+ local q = string.quote
+ local command = string.format("luatex --fmt=%s --lua=%s %s %s",q(barename),q(luaname),q(data),more ~= "" and q(more) or "")
+ logs.simple("running command: %s",command)
+ os.spawn(command)
+ end
+ end
+ end
+end
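-- Illustrative sketch, not part of the committed code: the commands the two
-- helpers above end up spawning, with made-up file names.
--
-- environment.make_format("cont-en") roughly spawns:
--   luatex --ini --lua="cont-en.lua" "cont-en.tex" \dump
--
-- environment.run_format("cont-en.fmt","myfile.tex","") roughly spawns:
--   luatex --fmt="cont-en" --lua="cont-en.luc" "myfile.tex"

local q = string.quote                 -- helper from l-string, as used above
print(string.format("luatex --ini --lua=%s %s %sdump",
    q("cont-en.lua"), q("cont-en.tex"), os.platform == "unix" and "\\\\" or "\\"))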
diff --git a/tex/context/base/luat-ini.lua b/tex/context/base/luat-ini.lua
index e6a715c07..2f12503ad 100644
--- a/tex/context/base/luat-ini.lua
+++ b/tex/context/base/luat-ini.lua
@@ -8,6 +8,11 @@ if not modules then modules = { } end modules ['luat-ini'] = {
--~ local ctxcatcodes = tex.ctxcatcodes
+local debug = require "debug"
+local string, table, lpeg, math, io, system = string, table, lpeg, math, io, system
+local next, setfenv = next, setfenv or debug.setfenv
+local format = string.format
+
--[[ldx--
<p>We cannot load anything yet. However, what we will do is reserve a few tables.
These can be used for runtime user data or third party modules and will not be
@@ -17,9 +22,11 @@ cluttered by macro package code.</p>
userdata = userdata or { } -- might be used
thirddata = thirddata or { } -- might be used
moduledata = moduledata or { } -- might be used
-document = document or { }
+documentdata = documentdata or { } -- might be used
parametersets = parametersets or { } -- experimental
+document = document or { }
+
--[[ldx--
<p>These can be used/set by the caller program; <t>mtx-context.lua</t> does it.</p>
--ldx]]--
@@ -44,66 +51,79 @@ just a lightweight suggestive system, not a watertight
one.</p>
--ldx]]--
-local debug = require "debug"
-
-local string, table, lpeg, math, io, system = string, table, lpeg, math, io, system
-local next, setfenv = next, setfenv or debug.setfenv
-local format = string.format
+-- this will change when we move on to lua 5.2+
local global = _G
global.global = global
+--~ rawset(global,"global",global)
local dummy = function() end
+-- another approach is to freeze tables by using a metatable, this will be
+-- implemented stepwise
+
local protected = {
-- global table
- global = global,
+ global = global,
-- user tables
- userdata = userdata,
- moduledata = moduledata,
- thirddata = thirddata,
- document = document,
+ -- moduledata = moduledata,
+ userdata = userdata,
+ thirddata = thirddata,
+ documentdata = documentdata,
-- reserved
- protect = dummy,
- unprotect = dummy,
+ protect = dummy,
+ unprotect = dummy,
-- luatex
- tex = tex,
+ tex = tex,
-- lua
- string = string,
- table = table,
- lpeg = lpeg,
- math = math,
- io = io,
- system = system,
+ string = string,
+ table = table,
+ lpeg = lpeg,
+ math = math,
+ io = io,
+ --
+ -- maybe other l-*, xml etc
}
-userdata, thirddata, moduledata = nil, nil, nil
+-- moduledata : no need for protection (only for developers)
+-- isolatedata : full protection
+-- userdata : protected
+-- thirddata : protected
+
+userdata, thirddata = nil, nil
if not setfenv then
texio.write_nl("warning: we need to fix setfenv by using 'load in' or '_ENV'")
end
-function protect(name)
- if name == "isolateddata" then
- local t = { }
+local function protect_full(name)
+ local t = { }
+ for k, v in next, protected do
+ t[k] = v
+ end
+ return t
+end
+
+local function protect_part(name)
+--~ local t = global[name]
+ local t = rawget(global,name)
+ if not t then
+ t = { }
for k, v in next, protected do
t[k] = v
end
- setfenv(2,t)
+--~ global[name] = t
+ rawset(global,name,t)
+ end
+ return t
+end
+
+function protect(name)
+ if name == "isolateddata" then
+ setfenv(2,protect_full(name))
else
- if not name then
- name = "shareddata"
- end
- local t = global[name]
- if not t then
- t = { }
- for k, v in next, protected do
- t[k] = v
- end
- global[name] = t
- end
- setfenv(2,t)
+ setfenv(2,protect_part(name or "shareddata"))
end
end
@@ -119,6 +139,10 @@ function lua.registername(name,message)
end
lua.name[lnn] = message
tex.write(lnn)
+ -- initialize once
+ if name ~= "isolateddata" then
+        protect_part(name or "shareddata") -- create the shared table once; protect_full would only build a discarded copy
+ end
end
--~ function lua.checknames()
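
The protect_full / protect_part pair above copies a whitelist of tables into a fresh (or cached, named) environment and installs it with setfenv. Below is a reduced Lua 5.1 sketch of the same whitelist-environment idea, with an illustrative subset instead of the real protected table.

    -- sketch of the whitelist-environment idea (Lua 5.1 semantics, needs setfenv)
    local protected = { string = string, math = math, print = print } -- illustrative subset

    local function makeenv()
        local t = { }
        for k, v in next, protected do t[k] = v end
        return t
    end

    local function sandboxed(f)
        setfenv(f, makeenv()) -- the chunk only sees the whitelisted names
        return f
    end

    sandboxed(function()
        print(math.pi)    -- ok, whitelisted
        print(os == nil)  -- true: os is not in this environment
    end)()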
diff --git a/tex/context/base/luat-ini.mkiv b/tex/context/base/luat-ini.mkiv
index c9d88bf4f..b7a0eb516 100644
--- a/tex/context/base/luat-ini.mkiv
+++ b/tex/context/base/luat-ini.mkiv
@@ -135,17 +135,20 @@
\csname dodostartnamed#1\v!code\endcsname}
\unexpanded\def\definenamedlua[#1]#2[#3]% no optional arg handling here yet
- {\scratchcounter\ctxlua{lua.registername("#1","#3")}%
- \normalexpanded{\long\edef\csname dodostartnamed#1\v!code\endcsname##1\csname\e!stop#1\v!code\endcsname}%
- {\endgroup\noexpand\directlua\the\scratchcounter{protect("#1\s!data")##1}}%
- \long\expandafter\def \csname\e!start#1\v!code\endcsname {\dostartnamedluacode{#1}}%
- \long\expandafter\edef\csname #1\v!code\endcsname##1{\noexpand\directlua\the\scratchcounter{protect("#1\s!data")##1}}}
+ {\ifcsname dodostartnamed#1\v!code\endcsname\else
+ \scratchcounter\ctxlua{lua.registername("#1","#3")}%
+ \normalexpanded{\long\edef\csname dodostartnamed#1\v!code\endcsname##1\csname\e!stop#1\v!code\endcsname}%
+ {\endgroup\noexpand\directlua\the\scratchcounter{protect("#1\s!data")##1}}%
+ \long\expandafter\def \csname\e!start#1\v!code\endcsname {\dostartnamedluacode{#1}}%
+ \long\expandafter\edef\csname #1\v!code\endcsname##1{\noexpand\directlua\the\scratchcounter{protect("#1\s!data")##1}}%
+ \fi}
%D We predefine a few.
+% \definenamedlua[module][module instance] % not needed
+
\definenamedlua[user] [private user instance]
\definenamedlua[third] [third party module instance]
-\definenamedlua[module] [module instance]
\definenamedlua[isolated][isolated instance]
%D In practice this works out as follows:
diff --git a/tex/context/base/luat-iop.lua b/tex/context/base/luat-iop.lua
index e5722d2bd..5d0d1f6c9 100644
--- a/tex/context/base/luat-iop.lua
+++ b/tex/context/base/luat-iop.lua
@@ -87,18 +87,6 @@ end
--~ f = io.open('c:/windows/crap.log') print(f)
--~ f = io.open('c:/windows/wmsetup.log') print(f)
-local inpout = { 'inp', 'out' }
-
-function io.set_opener_modes(i,o)
- local first = sub(i,1,1)
- for k=1,#inpout do
- local iov = io[inpout[k]]
- local f = iov[i] or iov[first]
- if f then f() end
- end
- io.open = io.finalize_openers(io.open)
-end
-
-- restricted
function ioinp.modes.restricted()
@@ -143,6 +131,14 @@ function ioout.modes.handy()
o_permit('[^/]')
end
---~ io.set_opener_modes('p','p')
---~ io.set_opener_modes('r','r')
---~ io.set_opener_modes('h','h')
+
+function io.checkopeners()
+ local inp = resolvers.variable("input_mode")
+ local out = resolvers.variable("output_mode")
+ inp = inp and ioinp.modes[inp]
+    out = out and ioout.modes[out] -- output modes live in ioout, not ioinp
+ if inp then inp() end
+ if out then out() end
+end
+
+--~ io.checkopeners()
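
io.checkopeners above just resolves two configuration variables to entries in the mode tables and calls whatever it finds; a tiny standalone sketch of that lookup-then-call pattern, with made-up mode tables:

    -- standalone sketch of the "look up a mode by name, call it if present" pattern
    local inputmodes  = { restricted = function() print("input restricted") end }
    local outputmodes = { handy      = function() print("output handy")     end }

    local function applymodes(inpname, outname)
        local inp = inpname and inputmodes[inpname]
        local out = outname and outputmodes[outname]
        if inp then inp() end
        if out then out() end
    end

    applymodes("restricted", "handy")  -- both found, both applied
    applymodes("paranoid",   nil)      -- unknown or unset modes are silently ignored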
diff --git a/tex/context/base/luat-lib.mkiv b/tex/context/base/luat-lib.mkiv
index 91ddec0aa..2a9d5ecd3 100644
--- a/tex/context/base/luat-lib.mkiv
+++ b/tex/context/base/luat-lib.mkiv
@@ -11,60 +11,51 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% \writestatus{loading}{ConTeXt Lua Macros / Libraries}
-
-\registerctxluafile{trac-inf} {1.001}
-\registerctxluafile{trac-tra} {1.001}
-\registerctxluafile{trac-log} {1.001}
-
-\registerctxluafile{luat-cbk} {1.001}
-
-\registerctxluafile{data-res} {1.001}
-\registerctxluafile{data-tmp} {1.001}
-\registerctxluafile{data-pre} {1.001}
-\registerctxluafile{data-inp} {1.001}
-\registerctxluafile{data-out} {1.001}
-\registerctxluafile{data-tex} {1.001}
-\registerctxluafile{data-bin} {1.001}
-\registerctxluafile{data-zip} {1.001}
-%registerctxluafile{data-crl} {1.001}
-\registerctxluafile{data-sch} {1.001}
-\registerctxluafile{data-tre} {1.001}
-\registerctxluafile{data-lua} {1.001}
-\registerctxluafile{data-ctx} {1.001}
-\registerctxluafile{data-con} {1.001}
-\registerctxluafile{data-use} {1.001}
-
-\registerctxluafile{luat-run} {1.001}
-\registerctxluafile{luat-fio} {1.001} % not needed, part of startup file
-\registerctxluafile{luat-cnf} {1.001} % not needed, part of startup file
-\registerctxluafile{luat-lua} {1.001}
-\registerctxluafile{luat-sto} {1.001}
-\registerctxluafile{luat-ini} {1.001}
-\registerctxluafile{luat-env} {1.001}
-
-\registerctxluafile{lxml-tab} {1.001}
-\registerctxluafile{lxml-lpt} {1.001}
-\registerctxluafile{lxml-xml} {1.001}
-\registerctxluafile{lxml-aux} {1.001}
-\registerctxluafile{lxml-mis} {1.001}
-
-\startruntimeluacode
- \edef\asciia{\ctxlua{tex.sprint(logs.mode)}}
- \edef\asciib{xml}
- \ifx\asciia\asciib % brrr
- \long\def\writebanner #1{\writestring {<m t='banner'>#1</m>}}
- \long\def\writestatus#1#2{\writestring {<m t='#1'>#2</m>}}
- \long\def\message #1{\normalmessage{<m t='message'>#1</m>}}
- \else
- \let\writebanner\writestring
- %\let\writestatus\normalwritestatus
- \let\message \normalmessage
- \fi
-\stopruntimeluacode
-
-%registerctxluafile{luat-tmp}{1.001}
+\writestatus{loading}{ConTeXt Lua Macros / Libraries}
+
+\registerctxluafile{trac-inf}{1.001}
+\registerctxluafile{trac-set}{1.001}
+\registerctxluafile{trac-tra}{1.001}
+\registerctxluafile{trac-log}{1.001}
+\registerctxluafile{trac-pro}{1.001}
+
+\registerctxluafile{data-ini}{1.001}
+\registerctxluafile{data-exp}{1.001}
+\registerctxluafile{data-env}{1.001}
+\registerctxluafile{data-tmp}{1.001}
+\registerctxluafile{data-met}{1.001}
+\registerctxluafile{data-res}{1.001}
+
+\registerctxluafile{data-pre}{1.001}
+\registerctxluafile{data-inp}{1.001}
+\registerctxluafile{data-out}{1.001}
+\registerctxluafile{data-tex}{1.001}
+\registerctxluafile{data-bin}{1.001}
+\registerctxluafile{data-zip}{1.001}
+%registerctxluafile{data-crl}{1.001}
+\registerctxluafile{data-sch}{1.001}
+\registerctxluafile{data-tre}{1.001}
+\registerctxluafile{data-lua}{1.001}
+\registerctxluafile{data-ctx}{1.001}
+\registerctxluafile{data-con}{1.001}
+\registerctxluafile{data-use}{1.001}
+\registerctxluafile{data-aux}{1.001}
+
+\registerctxluafile{luat-cbk}{1.001}
+\registerctxluafile{luat-run}{1.001}
+\registerctxluafile{luat-fio}{1.001}
+\registerctxluafile{luat-cnf}{1.001}
+\registerctxluafile{luat-lua}{1.001}
+\registerctxluafile{luat-sto}{1.001}
+\registerctxluafile{luat-ini}{1.001}
+\registerctxluafile{luat-env}{1.001}
\registerctxluafile{luat-exe}{1.001}
\registerctxluafile{luat-iop}{1.001}
+\registerctxluafile{lxml-tab}{1.001}
+\registerctxluafile{lxml-lpt}{1.001}
+\registerctxluafile{lxml-xml}{1.001}
+\registerctxluafile{lxml-aux}{1.001}
+\registerctxluafile{lxml-mis}{1.001}
+
\endinput
diff --git a/tex/context/base/luat-run.lua b/tex/context/base/luat-run.lua
index b64a99fc6..d33cbddd6 100644
--- a/tex/context/base/luat-run.lua
+++ b/tex/context/base/luat-run.lua
@@ -8,18 +8,18 @@ if not modules then modules = { } end modules ['luat-run'] = {
local format, rpadd = string.format, string.rpadd
-main = main or { }
+luatex = luatex or { }
local start_actions = { }
local stop_actions = { }
-function main.register_start_actions(...) table.insert(start_actions, ...) end
-function main.register_stop_actions (...) table.insert(stop_actions, ...) end
+function luatex.register_start_actions(...) table.insert(start_actions, ...) end
+function luatex.register_stop_actions (...) table.insert(stop_actions, ...) end
-main.show_tex_stat = main.show_tex_stat or function() end
-main.show_job_stat = main.show_job_stat or statistics.show_job_stat
+luatex.show_tex_stat = luatex.show_tex_stat or function() end
+luatex.show_job_stat = luatex.show_job_stat or statistics.show_job_stat
-function main.start()
+function luatex.start_run()
if logs.start_run then
logs.start_run()
end
@@ -28,14 +28,14 @@ function main.start()
end
end
-function main.stop()
+function luatex.stop_run()
for _, action in next, stop_actions do
action()
end
- if main.show_job_stat then
+ if luatex.show_job_stat then
statistics.show(logs.report_job_stat)
end
- if main.show_tex_stat then
+ if luatex.show_tex_stat then
for k,v in next, status.list() do
logs.report_tex_stat(k,v)
end
@@ -45,30 +45,30 @@ function main.stop()
end
end
-function main.start_shipout_page()
+function luatex.start_shipout_page()
logs.start_page_number()
end
-function main.stop_shipout_page()
+function luatex.stop_shipout_page()
logs.stop_page_number()
end
-function main.report_output_pages()
+function luatex.report_output_pages()
end
-function main.report_output_log()
+function luatex.report_output_log()
end
-- this can be done later
-callbacks.register('start_run', main.start, "actions performed at the beginning of a run")
-callbacks.register('stop_run', main.stop, "actions performed at the end of a run")
+callbacks.register('start_run', luatex.start_run, "actions performed at the beginning of a run")
+callbacks.register('stop_run', luatex.stop_run, "actions performed at the end of a run")
-callbacks.register('report_output_pages', main.report_output_pages, "actions performed when reporting pages")
-callbacks.register('report_output_log', main.report_output_log, "actions performed when reporting log file")
+callbacks.register('report_output_pages', luatex.report_output_pages, "actions performed when reporting pages")
+callbacks.register('report_output_log', luatex.report_output_log, "actions performed when reporting log file")
-callbacks.register('start_page_number', main.start_shipout_page, "actions performed at the beginning of a shipout")
-callbacks.register('stop_page_number', main.stop_shipout_page, "actions performed at the end of a shipout")
+callbacks.register('start_page_number', luatex.start_shipout_page, "actions performed at the beginning of a shipout")
+callbacks.register('stop_page_number', luatex.stop_shipout_page, "actions performed at the end of a shipout")
-callbacks.register('process_input_buffer', false, "actions performed when reading data")
-callbacks.register('process_output_buffer', false, "actions performed when writing data")
+callbacks.register('process_input_buffer', false, "actions performed when reading data")
+callbacks.register('process_output_buffer', false, "actions performed when writing data")
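
The main → luatex rename keeps the action registries as they were: the register functions append callables to a list that start_run / stop_run later walk. A minimal sketch of that registry pattern (illustrative names, not the real luatex table defined above):

    -- sketch of the start/stop action registry pattern used in luat-run.lua
    local registry = { start = { }, stop = { } }

    local function register_start_action(f) table.insert(registry.start, f) end
    local function register_stop_action (f) table.insert(registry.stop,  f) end

    local function run(body)
        for _, action in ipairs(registry.start) do action() end
        body()
        for _, action in ipairs(registry.stop)  do action() end
    end

    register_start_action(function() print("run starts") end)
    register_stop_action (function() print("run ends")   end)
    run(function() print("typesetting ...") end)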
diff --git a/tex/context/base/luat-sto.lua b/tex/context/base/luat-sto.lua
index 08da735db..4d7af73e4 100644
--- a/tex/context/base/luat-sto.lua
+++ b/tex/context/base/luat-sto.lua
@@ -17,6 +17,8 @@ storage.nofmodules = storage.nofmodules or 0
storage.data = { }
storage.evaluators = { }
+local report_storage = logs.new("storage")
+
local evaluators = storage.evaluators -- (evaluate,message,names)
local data = storage.data
@@ -30,7 +32,7 @@ function storage.evaluate(name)
evaluators[#evaluators+1] = name
end
-function storage.finalize() -- we can prepend the string with "evaluate:"
+local function finalize() -- we can prepend the string with "evaluate:"
for i=1,#evaluators do
local t = evaluators[i]
for i, v in next, t do
@@ -50,7 +52,9 @@ function storage.finalize() -- we can prepend the string with "evaluate:"
end
end
-function storage.dump()
+lua.registerfinalizer(finalize)
+
+local function dump()
for i=1,#data do
local d = data[i]
local message, original, target, evaluate = d[1], d[2] ,d[3] ,d[4]
@@ -68,10 +72,10 @@ function storage.dump()
end
storage.max = storage.max + 1
if trace_storage then
- logs.report('storage','saving %s in slot %s',message,storage.max)
+ report_storage('saving %s in slot %s',message,storage.max)
code =
initialize ..
- format("logs.report('storage','restoring %s from slot %s') ",message,storage.max) ..
+ format("report_storage('restoring %s from slot %s') ",message,storage.max) ..
table.serialize(original,name) ..
finalize
else
@@ -82,20 +86,22 @@ function storage.dump()
end
end
+lua.registerfinalizer(dump)
+
-- we also need to count at generation time (nicer for message)
-if lua.bytecode then -- from 0 upwards
- local i, b = storage.min, lua.bytecode
- while b[i] do
- storage.noftables = i
- b[i]()
- b[i] = nil
- i = i + 1
- end
-end
+--~ if lua.bytecode then -- from 0 upwards
+--~ local i, b = storage.min, lua.bytecode
+--~ while b[i] do
+--~ storage.noftables = i
+--~ b[i]()
+--~ b[i] = nil
+--~ i = i + 1
+--~ end
+--~ end
statistics.register("stored bytecode data", function()
- local modules = (storage.nofmodules > 0 and storage.nofmodules) or (status.luabytecodes - 500)
+ local modules = (storage.nofmodules > 0 and storage.nofmodules) or (status.luabytecodes - lua.firstbytecode - 1)
local dumps = (storage.noftables > 0 and storage.noftables) or storage.max-storage.min + 1
return format("%s modules, %s tables, %s chunks",modules,dumps,modules+dumps)
end)
@@ -104,8 +110,6 @@ if lua.bytedata then
storage.register("lua/bytedata",lua.bytedata,"lua.bytedata")
end
--- wrong place, kind of forward reference
-
function statistics.report_storage(whereto)
whereto = whereto or "term and log"
write_nl(whereto," ","stored tables:"," ")
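
The dump/restore machinery above turns each registered table into a chunk that, executed at format loading time, recreates the data and reports which slot it came from. Here is a toy round trip under the assumption that a very naive serializer is good enough; ConTeXt's real table.serialize is far more complete.

    local data = { }

    local function register(message, original, targetname)
        data[#data+1] = { message, original, targetname }
    end

    local function serialize(t, name) -- naive stand-in for table.serialize
        local s = { name .. " = {" }
        for k, v in pairs(t) do
            s[#s+1] = string.format("  [%q] = %q,", tostring(k), tostring(v))
        end
        s[#s+1] = "}"
        return table.concat(s, "\n")
    end

    local function dump()
        for i = 1, #data do
            local message, original, target = data[i][1], data[i][2], data[i][3]
            local chunk = serialize(original, target)
            print("-- restoring " .. message) -- ConTeXt stores the chunk in a bytecode slot instead
            assert((loadstring or load)(chunk))()
        end
    end

    languages = { }
    register("language data", { en = "english", nl = "dutch" }, "languages.registered")
    dump()
    print(languages.registered.en) --> english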
diff --git a/tex/context/base/lxml-aux.lua b/tex/context/base/lxml-aux.lua
index 00f791909..523e7c544 100644
--- a/tex/context/base/lxml-aux.lua
+++ b/tex/context/base/lxml-aux.lua
@@ -11,6 +11,8 @@ if not modules then modules = { } end modules ['lxml-aux'] = {
local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
+local report_xml = logs.new("xml")
+
local xmlparseapply, xmlconvert, xmlcopy, xmlname = xml.parse_apply, xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
@@ -19,7 +21,7 @@ local insert, remove = table.insert, table.remove
local gmatch, gsub = string.gmatch, string.gsub
local function report(what,pattern,c,e)
- logs.report("xml","%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
+ report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
local function withelements(e,handle,depth)
diff --git a/tex/context/base/lxml-ent.lua b/tex/context/base/lxml-ent.lua
index 193611937..a58b1d493 100644
--- a/tex/context/base/lxml-ent.lua
+++ b/tex/context/base/lxml-ent.lua
@@ -24,6 +24,8 @@ original entity is returned.</p>
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+local report_xml = logs.new("xml")
+
xml.entities = xml.entities or { } -- xml.entity_handler == function
storage.register("xml/entities",xml.entities,"xml.entities") -- this will move to lxml
@@ -37,7 +39,7 @@ local parsedentity = xml.parsedentitylpeg
function xml.register_entity(key,value)
entities[key] = value
if trace_entities then
- logs.report("xml","registering entity '%s' as: %s",key,value)
+ report_xml("registering entity '%s' as: %s",key,value)
end
end
diff --git a/tex/context/base/lxml-lpt.lua b/tex/context/base/lxml-lpt.lua
index bddbe4868..c3ec2370a 100644
--- a/tex/context/base/lxml-lpt.lua
+++ b/tex/context/base/lxml-lpt.lua
@@ -43,6 +43,8 @@ local trace_lpath = false if trackers then trackers.register("xml.path",
local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end
+local report_lpath = logs.new("lpath")
+
--[[ldx--
<p>We've now arrived at an interesting part: accessing the tree using a subset
of <l n='xpath'/> and since we're not compatible we call it <l n='lpath'/>. We
@@ -69,7 +71,7 @@ local function fallback (t, name)
if fn then
t[name] = fn
else
- logs.report("xml","unknown sub finalizer '%s'",tostring(name))
+ report_lpath("unknown sub finalizer '%s'",tostring(name))
fn = function() end
end
return fn
@@ -613,13 +615,13 @@ local skip = { }
local function errorrunner_e(str,cnv)
if not skip[str] then
- logs.report("lpath","error in expression: %s => %s",str,cnv)
+ report_lpath("error in expression: %s => %s",str,cnv)
skip[str] = cnv or str
end
return false
end
local function errorrunner_f(str,arg)
- logs.report("lpath","error in finalizer: %s(%s)",str,arg or "")
+ report_lpath("error in finalizer: %s(%s)",str,arg or "")
return false
end
@@ -786,7 +788,7 @@ local function lshow(parsed)
end
local s = table.serialize_functions -- ugly
table.serialize_functions = false -- ugly
- logs.report("lpath","%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false))
+ report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false))
table.serialize_functions = s -- ugly
end
@@ -816,7 +818,7 @@ parse_pattern = function (pattern) -- the gain of caching is rather minimal
local np = #parsed
if np == 0 then
parsed = { pattern = pattern, register_self, state = "parsing error" }
- logs.report("lpath","parsing error in '%s'",pattern)
+ report_lpath("parsing error in '%s'",pattern)
lshow(parsed)
else
-- we could have done this with a more complex parser but this
@@ -920,32 +922,32 @@ local function traced_apply(list,parsed,nofparsed,order)
if trace_lparse then
lshow(parsed)
end
- logs.report("lpath", "collecting : %s",parsed.pattern)
- logs.report("lpath", " root tags : %s",tagstostring(list))
- logs.report("lpath", " order : %s",order or "unset")
+ report_lpath("collecting : %s",parsed.pattern)
+ report_lpath(" root tags : %s",tagstostring(list))
+ report_lpath(" order : %s",order or "unset")
local collected = list
for i=1,nofparsed do
local pi = parsed[i]
local kind = pi.kind
if kind == "axis" then
collected = apply_axis[pi.axis](collected)
- logs.report("lpath", "% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
+ report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
elseif kind == "nodes" then
collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- logs.report("lpath", "% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
+ report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
- logs.report("lpath", "% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
+ report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
elseif kind == "finalizer" then
collected = pi.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
return collected
end
if not collected or #collected == 0 then
local pn = i < nofparsed and parsed[nofparsed]
if pn and pn.kind == "finalizer" then
collected = pn.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
return collected
end
return nil
@@ -1058,7 +1060,7 @@ expressions.boolean = toboolean
-- user interface
local function traverse(root,pattern,handle)
- logs.report("xml","use 'xml.selection' instead for '%s'",pattern)
+ report_lpath("use 'xml.selection' instead for '%s'",pattern)
local collected = parse_apply({ root },pattern)
if collected then
for c=1,#collected do
@@ -1106,7 +1108,7 @@ local function dofunction(collected,fnc)
f(collected[c])
end
else
- logs.report("xml","unknown function '%s'",fnc)
+ report_lpath("unknown function '%s'",fnc)
end
end
end
diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua
index 23cd1cf04..49db5eb26 100644
--- a/tex/context/base/lxml-tab.lua
+++ b/tex/context/base/lxml-tab.lua
@@ -12,6 +12,8 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+local report_xml = logs.new("xml")
+
--[[ldx--
<p>The parser used here is inspired by the variant discussed in the lua book, but
handles comment and processing instructions, has a different structure, provides
@@ -150,7 +152,7 @@ local dcache, hcache, acache = { }, { }, { }
local mt = { }
-function initialize_mt(root)
+local function initialize_mt(root)
mt = { __index = root } -- will be redefined later
end
@@ -254,7 +256,7 @@ local reported_attribute_errors = { }
local function attribute_value_error(str)
if not reported_attribute_errors[str] then
- logs.report("xml","invalid attribute value: %q",str)
+ report_xml("invalid attribute value: %q",str)
reported_attribute_errors[str] = true
at._error_ = str
end
@@ -262,7 +264,7 @@ local function attribute_value_error(str)
end
local function attribute_specification_error(str)
if not reported_attribute_errors[str] then
- logs.report("xml","invalid attribute specification: %q",str)
+ report_xml("invalid attribute specification: %q",str)
reported_attribute_errors[str] = true
at._error_ = str
end
@@ -325,18 +327,18 @@ local function handle_hex_entity(str)
h = unify_predefined and predefined_unified[n]
if h then
if trace_entities then
- logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
end
elseif utfize then
h = (n and utfchar(n)) or xml.unknown_hex_entity_format(str) or ""
if not n then
- logs.report("xml","utfize, ignoring hex entity &#x%s;",str)
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h)
+ report_xml("utfize, converting hex entity &#x%s; into %s",str,h)
end
else
if trace_entities then
- logs.report("xml","found entity &#x%s;",str)
+ report_xml("found entity &#x%s;",str)
end
h = "&#x" .. str .. ";"
end
@@ -352,18 +354,18 @@ local function handle_dec_entity(str)
d = unify_predefined and predefined_unified[n]
if d then
if trace_entities then
- logs.report("xml","utfize, converting dec entity &#%s; into %s",str,d)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
elseif utfize then
d = (n and utfchar(n)) or xml.unknown_dec_entity_format(str) or ""
if not n then
- logs.report("xml","utfize, ignoring dec entity &#%s;",str)
+ report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,h)
end
else
if trace_entities then
- logs.report("xml","found entity &#%s;",str)
+ report_xml("found entity &#%s;",str)
end
d = "&#" .. str .. ";"
end
@@ -388,7 +390,7 @@ local function handle_any_entity(str)
end
if a then
if trace_entities then
- logs.report("xml","resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
a = lpegmatch(parsedentity,a) or a
else
@@ -397,11 +399,11 @@ local function handle_any_entity(str)
end
if a then
if trace_entities then
- logs.report("xml","resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
end
else
if trace_entities then
- logs.report("xml","keeping entity &%s;",str)
+ report_xml("keeping entity &%s;",str)
end
if str == "" then
a = "&error;"
@@ -413,7 +415,7 @@ local function handle_any_entity(str)
acache[str] = a
elseif trace_entities then
if not acache[str] then
- logs.report("xml","converting entity &%s; into %s",str,a)
+ report_xml("converting entity &%s; into %s",str,a)
acache[str] = a
end
end
@@ -422,7 +424,7 @@ local function handle_any_entity(str)
local a = acache[str]
if not a then
if trace_entities then
- logs.report("xml","found entity &%s;",str)
+ report_xml("found entity &%s;",str)
end
a = resolve_predefined and predefined_simplified[str]
if a then
@@ -441,7 +443,7 @@ local function handle_any_entity(str)
end
local function handle_end_entity(chr)
- logs.report("xml","error in entity, %q found instead of ';'",chr)
+ report_xml("error in entity, %q found instead of ';'",chr)
end
local space = S(' \r\n\t')
@@ -576,7 +578,7 @@ local function xmlconvert(data, settings)
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, result, entities = { }, { }, { }, { }, nil, nil, settings.entities or { }
+ stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -604,6 +606,7 @@ local function xmlconvert(data, settings)
else
errorstr = "invalid xml file - no text at all"
end
+ local result
if errorstr and errorstr ~= "" then
result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
setmetatable(stack, mt)
@@ -784,7 +787,7 @@ local function verbose_element(e,handlers)
ats[#ats+1] = format('%s=%q',k,v)
end
end
- if ern and trace_remap and ern ~= ens then
+ if ern and trace_entities and ern ~= ens then
ens = ern
end
if ens ~= "" then
@@ -915,7 +918,7 @@ local function newhandlers(settings)
if settings then
for k,v in next, settings do
if type(v) == "table" then
- tk = t[k] if not tk then tk = { } t[k] = tk end
+ local tk = t[k] if not tk then tk = { } t[k] = tk end
for kk,vv in next, v do
tk[kk] = vv
end
@@ -1026,7 +1029,7 @@ local function xmltext(root) -- inline
return (root and xmltostring(root)) or ""
end
-function initialize_mt(root)
+initialize_mt = function(root) -- redefinition
mt = { __tostring = xmltext, __index = root }
end
diff --git a/tex/context/base/lxml-tex.lua b/tex/context/base/lxml-tex.lua
index aaa90217f..5b116fbf4 100644
--- a/tex/context/base/lxml-tex.lua
+++ b/tex/context/base/lxml-tex.lua
@@ -15,7 +15,7 @@ local type, next, tonumber, tostring = type, next, tonumber, tostring
local lpegmatch = lpeg.match
local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc
-if not tex and not tex.sprint then
+if not tex or not tex.sprint then -- no longer needed
tex = {
sprint = function(catcodes,...) texio.write(table.concat{...}) end,
print = function(catcodes,...) texio.write(table.concat{...}) end,
@@ -43,6 +43,8 @@ local trace_loading = false trackers.register("lxml.loading", function(v) tra
local trace_access = false trackers.register("lxml.access", function(v) trace_access = v end)
local trace_comments = false trackers.register("lxml.comments", function(v) trace_comments = v end)
+local report_lxml = logs.new("lxml")
+
lxml = lxml or { }
lxml.loaded = lxml.loaded or { }
@@ -211,20 +213,20 @@ local function get_id(id, qualified)
return root
end
elseif trace_access then
- logs.report("lxml","'%s' has no index entry '%s'",d,i)
+ report_lxml("'%s' has no index entry '%s'",d,i)
end
elseif trace_access then
- logs.report("lxml","'%s' has no index",d)
+ report_lxml("'%s' has no index",d)
end
elseif trace_access then
- logs.report("lxml","'%s' is not loaded",d)
+ report_lxml("'%s' is not loaded",d)
end
elseif trace_access then
- logs.report("lxml","'%s' is not loaded",i)
+ report_lxml("'%s' is not loaded",i)
end
end
elseif trace_access then
- logs.report("lxml","invalid id (nil)")
+ report_lxml("invalid id (nil)")
end
end
@@ -270,7 +272,7 @@ local function addindex(name,check_sum,force)
root.index = index
root.maxindex = maxindex
if trace_access then
- logs.report("lxml","%s indexed, %s nodes",tostring(name),maxindex)
+ report_lxml("%s indexed, %s nodes",tostring(name),maxindex)
end
end
end
@@ -444,7 +446,7 @@ end
local function tex_comment(e,handlers)
if trace_comments then
- logs.report("lxml","comment: %s",e.dt[1])
+ report_lxml("comment: %s",e.dt[1])
end
end
@@ -470,7 +472,7 @@ local function tex_element(e,handlers)
end
texsprint(ctxcatcodes,"\\xmlw{",command,"}{",rootname,"::",ix,"}")
else
- logs.report("lxml", "fatal error: no index for '%s'",command)
+ report_lxml( "fatal error: no index for '%s'",command)
texsprint(ctxcatcodes,"\\xmlw{",command,"}{",ix or 0,"}")
end
elseif tc == "function" then
@@ -522,7 +524,7 @@ local function ctx_text(e)
end
local function tex_handle(...)
--- logs.report("lxml", "error while flushing: %s", concat { ... })
+-- report_lxml( "error while flushing: %s", concat { ... })
texsprint(...) -- notcatcodes is active anyway
end
@@ -762,19 +764,19 @@ function lxml.setsetup(id,pattern,setup)
local ix = e.ix or 0
if setup == "-" then
e.command = false
- logs.report("lxml","lpath matched (a) %5i: %s = %s -> skipped",c,ix,setup)
+ report_lxml("lpath matched (a) %5i: %s = %s -> skipped",c,ix,setup)
elseif setup == "+" then
e.command = true
- logs.report("lxml","lpath matched (b) %5i: %s = %s -> text",c,ix,setup)
+ report_lxml("lpath matched (b) %5i: %s = %s -> text",c,ix,setup)
else
local tg = e.tg
if tg then -- to be sure
e.command = tg
local ns = e.rn or e.ns
if ns == "" then
- logs.report("lxml","lpath matched (c) %5i: %s = %s -> %s",c,ix,tg,tg)
+ report_lxml("lpath matched (c) %5i: %s = %s -> %s",c,ix,tg,tg)
else
- logs.report("lxml","lpath matched (d) %5i: %s = %s:%s -> %s",c,ix,ns,tg,tg)
+ report_lxml("lpath matched (d) %5i: %s = %s:%s -> %s",c,ix,ns,tg,tg)
end
end
end
@@ -792,7 +794,7 @@ function lxml.setsetup(id,pattern,setup)
end
end
elseif trace_setups then
- logs.report("lxml","no lpath matches for %s",pattern)
+ report_lxml("no lpath matches for %s",pattern)
end
else
local a, b = match(setup,"^(.+:)([%*%-])$")
@@ -806,23 +808,23 @@ function lxml.setsetup(id,pattern,setup)
if b == "-" then
e.command = false
if ns == "" then
- logs.report("lxml","lpath matched (e) %5i: %s = %s -> skipped",c,ix,tg)
+ report_lxml("lpath matched (e) %5i: %s = %s -> skipped",c,ix,tg)
else
- logs.report("lxml","lpath matched (f) %5i: %s = %s:%s -> skipped",c,ix,ns,tg)
+ report_lxml("lpath matched (f) %5i: %s = %s:%s -> skipped",c,ix,ns,tg)
end
elseif b == "+" then
e.command = true
if ns == "" then
- logs.report("lxml","lpath matched (g) %5i: %s = %s -> text",c,ix,tg)
+ report_lxml("lpath matched (g) %5i: %s = %s -> text",c,ix,tg)
else
- logs.report("lxml","lpath matched (h) %5i: %s = %s:%s -> text",c,ix,ns,tg)
+ report_lxml("lpath matched (h) %5i: %s = %s:%s -> text",c,ix,ns,tg)
end
else
e.command = a .. tg
if ns == "" then
- logs.report("lxml","lpath matched (i) %5i: %s = %s -> %s",c,ix,tg,e.command)
+ report_lxml("lpath matched (i) %5i: %s = %s -> %s",c,ix,tg,e.command)
else
- logs.report("lxml","lpath matched (j) %5i: %s = %s:%s -> %s",c,ix,ns,tg,e.command)
+ report_lxml("lpath matched (j) %5i: %s = %s:%s -> %s",c,ix,ns,tg,e.command)
end
end
end
@@ -839,7 +841,7 @@ function lxml.setsetup(id,pattern,setup)
end
end
elseif trace_setups then
- logs.report("lxml","no lpath matches for %s",pattern)
+ report_lxml("no lpath matches for %s",pattern)
end
else
local collected = lxmlparseapply(id,pattern)
@@ -850,9 +852,9 @@ function lxml.setsetup(id,pattern,setup)
e.command = setup
local ns, tg, ix = e.rn or e.ns, e.tg, e.ix or 0
if ns == "" then
- logs.report("lxml","lpath matched (k) %5i: %s = %s -> %s",c,ix,tg,setup)
+ report_lxml("lpath matched (k) %5i: %s = %s -> %s",c,ix,tg,setup)
else
- logs.report("lxml","lpath matched (l) %5i: %s = %s:%s -> %s",c,ix,ns,tg,setup)
+ report_lxml("lpath matched (l) %5i: %s = %s:%s -> %s",c,ix,ns,tg,setup)
end
end
else
@@ -861,7 +863,7 @@ function lxml.setsetup(id,pattern,setup)
end
end
elseif trace_setups then
- logs.report("lxml","no lpath matches for %s",pattern)
+ report_lxml("no lpath matches for %s",pattern)
end
end
end
@@ -932,9 +934,10 @@ local function index(collected,n)
end
end
-local function command(collected,cmd)
- if collected then
- for c=1,#collected do
+local function command(collected,cmd,otherwise)
+ local n = collected and #collected
+ if n and n > 0 then
+ for c=1,n do
local e = collected[c]
local ix = e.ix
if not ix then
@@ -943,6 +946,8 @@ local function command(collected,cmd)
end
texsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",e.name,"::",ix,"}")
end
+ elseif otherwise then
+ texsprint(ctxcatcodes,"\\xmlw{",otherwise,"}{#1}")
end
end
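
Most hunks in this commit replace calls like logs.report("lxml",...) with a cached reporter obtained once via logs.new("lxml"): one closure per category instead of a category lookup per message. The real logs.new lives in trac-log.lua and is not part of this diff, so the factory below is only an assumption about its shape.

    -- rough approximation of the reporter-factory idea behind logs.new()
    local function new_reporter(category)
        local prefix = category .. " > "
        return function(fmt, ...)
            io.write(prefix .. string.format(fmt, ...) .. "\n")
        end
    end

    local report_lxml = new_reporter("lxml")

    report_lxml("'%s' has no index entry '%s'", "main", 123)
    --> lxml > 'main' has no index entry '123'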
diff --git a/tex/context/base/m-barcodes.mkiv b/tex/context/base/m-barcodes.mkiv
index b0eae1485..b86149cb9 100644
--- a/tex/context/base/m-barcodes.mkiv
+++ b/tex/context/base/m-barcodes.mkiv
@@ -35,7 +35,7 @@
% \definefont[barcodefont][file:texgyreheros-regular]
\startluacode
-plugins.barcodes = { }
+moduledata.barcodes = { }
local function split(code)
local t = { string.byte(code,1,#code) }
@@ -53,7 +53,7 @@ local function split(code)
end
end
-function plugins.barcodes.isbn_1(original)
+function moduledata.barcodes.isbn_1(original)
local code = string.gsub(original,"%-","")
local t, s, m, c = split(code)
if t then
@@ -68,7 +68,7 @@ function plugins.barcodes.isbn_1(original)
tex.sprint(code)
end
-function plugins.barcodes.isbn_2(original)
+function moduledata.barcodes.isbn_2(original)
local code = string.gsub(original,"%-","")
local t, s, m, c = split(code)
if t and #t == 12 then
@@ -85,13 +85,13 @@ end
\vbox {
\hbox {
\hskip3.7mm
- \scale[width=34mm]{\barcodefont ISBN \ctxlua{plugins.barcodes.isbn_2("\getvariable{barcode}{code}")}}
+ \scale[width=34mm]{\barcodefont ISBN \ctxlua{moduledata.barcodes.isbn_2("\getvariable{barcode}{code}")}}
}
\par
\normalexpanded { \noexpand \setPSTRICKS {
\noexpand \pspicture(-4mm,-1mm)(38mm,26mm)
\noexpand \psbarcode {
- \ctxlua{plugins.barcodes.isbn_1("\getvariable{barcode}{code}")}
+ \ctxlua{moduledata.barcodes.isbn_1("\getvariable{barcode}{code}")}
} {
includetext guardwhitespace
} {
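
The isbn_2 helper above appends the EAN-13 check digit to a twelve-digit ISBN. The digit follows the standard EAN-13 rule (alternating weights 1 and 3, complement modulo 10); here is a standalone sketch that does not depend on the module's split() helper.

    -- standalone EAN-13 / ISBN-13 check digit calculation (alternating 1/3 weights)
    local function isbn13_checkdigit(code) -- code: 12 digits, hyphens allowed
        code = code:gsub("%-", "")
        assert(#code == 12, "expected 12 digits")
        local sum = 0
        for i = 1, 12 do
            local d = tonumber(code:sub(i, i))
            sum = sum + d * (i % 2 == 1 and 1 or 3)
        end
        return (10 - sum % 10) % 10
    end

    print(isbn13_checkdigit("978-0-306-40615")) --> 7, so the full ISBN is 978-0-306-40615-7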
diff --git a/tex/context/base/m-mathcrap.mkiv b/tex/context/base/m-mathcrap.mkiv
new file mode 100644
index 000000000..37dbbedeb
--- /dev/null
+++ b/tex/context/base/m-mathcrap.mkiv
@@ -0,0 +1,76 @@
+%D \module
+%D [ file=m-mathcrap,
+%D version=2010.05.30,
+%D title=\CONTEXT\ Modules,
+%D subtitle=Math Crap,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright=PRAGMA ADE]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is meant for those who want to use the (incomplete and sort of useless)
+%D unicode superscripts and subscripts. We should look ahead and collapse them
+%D but I will only implement that in calcmath when the need is there. For now the
+%D spacing can be somewhat suboptimal, but that probably does not matter here.
+%D
+%D \startbuffer
+%D $a₀₁₂₃₄₅₆₇₈₉₋₌₊$
+%D \stopbuffer
+%D
+%D \typebuffer \blank \getbuffer \blank
+
+\unprotect
+
+\unexpanded\def\mathunicodesupercrap#1{\mathortext{{^{#1}}}{\high{#1}}}
+\unexpanded\def\mathunicodesubcrap #1{\mathortext{{_{#1}}}{\low {#1}}}
+
+\ifdefined\installanddefineactivecharacter\else
+
+ \def\installanddefineactivecharacter #1 #2% we need this as command
+ {\normalexpanded{\noexpand\installactivecharacter \utfchar{#1} }%
+ \defineactivecharacter #1 {#2}}
+
+\fi
+
+\installanddefineactivecharacter "2070 {\mathunicodesupercrap 0}
+\installanddefineactivecharacter "00B9 {\mathunicodesupercrap 1}₀
+\installanddefineactivecharacter "00B2 {\mathunicodesupercrap 2}₀
+\installanddefineactivecharacter "00B3 {\mathunicodesupercrap 3}₀
+\installanddefineactivecharacter "2074 {\mathunicodesupercrap 4}
+\installanddefineactivecharacter "2075 {\mathunicodesupercrap 5}
+\installanddefineactivecharacter "2076 {\mathunicodesupercrap 6}
+\installanddefineactivecharacter "2077 {\mathunicodesupercrap 7}
+\installanddefineactivecharacter "2078 {\mathunicodesupercrap 8}
+\installanddefineactivecharacter "2079 {\mathunicodesupercrap 9}
+\installanddefineactivecharacter "207A {\mathunicodesupercrap +}
+\installanddefineactivecharacter "207B {\mathunicodesupercrap -}
+\installanddefineactivecharacter "207C {\mathunicodesupercrap =}
+\installanddefineactivecharacter "207D {\mathunicodesupercrap (}
+\installanddefineactivecharacter "207E {\mathunicodesupercrap )}
+\installanddefineactivecharacter "207F {\mathunicodesupercrap n}
+
+\installanddefineactivecharacter "2080 {\mathunicodesubcrap 0}
+\installanddefineactivecharacter "2081 {\mathunicodesubcrap 1}
+\installanddefineactivecharacter "2082 {\mathunicodesubcrap 2}
+\installanddefineactivecharacter "2083 {\mathunicodesubcrap 3}
+\installanddefineactivecharacter "2084 {\mathunicodesubcrap 4}
+\installanddefineactivecharacter "2085 {\mathunicodesubcrap 5}
+\installanddefineactivecharacter "2086 {\mathunicodesubcrap 6}
+\installanddefineactivecharacter "2087 {\mathunicodesubcrap 7}
+\installanddefineactivecharacter "2088 {\mathunicodesubcrap 8}
+\installanddefineactivecharacter "2089 {\mathunicodesubcrap 9}
+\installanddefineactivecharacter "208A {\mathunicodesubcrap +}
+\installanddefineactivecharacter "208B {\mathunicodesubcrap -}
+\installanddefineactivecharacter "208C {\mathunicodesubcrap =}
+\installanddefineactivecharacter "208D {\mathunicodesubcrap (}
+\installanddefineactivecharacter "208E {\mathunicodesubcrap )}
+\installanddefineactivecharacter "2090 {\mathunicodesubcrap A}
+\installanddefineactivecharacter "2091 {\mathunicodesubcrap E}
+\installanddefineactivecharacter "2092 {\mathunicodesubcrap O}
+\installanddefineactivecharacter "2093 {\mathunicodesubcrap X}
+%installanddefineactivecharacter "2094 {\mathunicodesubcrap ?} % SCHWAA
+
+\protect \endinput
diff --git a/tex/context/base/m-pstricks.lua b/tex/context/base/m-pstricks.lua
index 35cae93f6..4fb80c7ed 100644
--- a/tex/context/base/m-pstricks.lua
+++ b/tex/context/base/m-pstricks.lua
@@ -17,8 +17,9 @@ if not modules then modules = { } end modules ['m-pstricks'] = {
local format, lower, concat, gmatch = string.format, string.lower, table.concat, string.gmatch
local variables = interfaces.variables
-plugins = plugins or { }
-plugins.pstricks = plugins.pstricks or { }
+moduledata.pstricks = moduledata.pstricks or { }
+
+local report_pstricks = logs.new("pstricks")
local template = [[
\starttext
@@ -41,13 +42,13 @@ local template = [[
local modules = { }
local graphics = 0
-function plugins.pstricks.usemodule(names)
+function moduledata.pstricks.usemodule(names)
for name in gmatch(names,"([^%s,]+)") do
modules[#modules+1] = format([[\readfile{%s}{}{}]],name)
end
end
-function plugins.pstricks.process(n)
+function moduledata.pstricks.process(n)
graphics = graphics + 1
local name = string.format("%s-pstricks-%04i",tex.jobname,graphics)
local data = buffers.collect("def-"..n)
@@ -65,9 +66,9 @@ function plugins.pstricks.process(n)
if lfs.isfile(pdffile) then
context.externalfigure( { pdffile }, { object = variables.no } )
else
- logs.report("plugins","pstricks run failed, no pdf file")
+ report_pstricks("run failed, no pdf file")
end
else
- logs.report("plugins","pstricks run failed, no ps file")
+ report_pstricks("run failed, no ps file")
end
end
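
The plugins → moduledata rename here and in m-barcodes/m-timing moves module-level Lua helpers into the moduledata namespace that luat-ini.lua now reserves. The convention is simply: create your own subtable if it does not exist yet and hang functions off it. The module name below is made up.

    -- minimal illustration of the moduledata namespace convention (module name is made up)
    moduledata = moduledata or { }               -- reserved by luat-ini.lua in ConTeXt
    moduledata.demo = moduledata.demo or { }

    function moduledata.demo.greet(name)
        print(string.format("hello from moduledata.demo, %s", name))
    end

    moduledata.demo.greet("pstricks user")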
diff --git a/tex/context/base/m-pstricks.mkiv b/tex/context/base/m-pstricks.mkiv
index c800ec199..c976982a6 100644
--- a/tex/context/base/m-pstricks.mkiv
+++ b/tex/context/base/m-pstricks.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\ctxloadluafile{m-pstricks}{}
+\registerctxluafile{m-pstricks}{}
%D \startbuffer
%D \usePSTRICKSmodule[pst-barcode]
@@ -57,8 +57,8 @@
\definebuffer[PSTRICKS]
-\unexpanded\def\processPSTRICKS {\ctxlua{plugins.pstricks.process(\thebuffernumber{PSTRICKS})}}
-\unexpanded\def\usePSTRICKSmodule[#1]{\ctxlua{plugins.pstricks.usemodule("#1")}}
+\unexpanded\def\processPSTRICKS {\ctxlua{moduledata.pstricks.process(\thebuffernumber{PSTRICKS})}}
+\unexpanded\def\usePSTRICKSmodule[#1]{\ctxlua{moduledata.pstricks.usemodule("#1")}}
\unexpanded\def\setPSTRICKS #1{\setbuffer[def-\thebuffernumber{PSTRICKS}]#1\endbuffer}
\let\stopPSTRICKS\processPSTRICKS
diff --git a/tex/context/base/m-punk.mkiv b/tex/context/base/m-punk.mkiv
index 65bf03974..051d51485 100644
--- a/tex/context/base/m-punk.mkiv
+++ b/tex/context/base/m-punk.mkiv
@@ -92,7 +92,7 @@ do
instances = instances or metapost.characters.instances or 10
local fontname = file.removesuffix(file.basename(name))
local hash = file.robustname(string.format("%s %05i %03i", fontname, scalefactor*1000, instances))
- local lists = containers.read(fonts.mp.cache(), hash)
+ local lists = containers.read(fonts.mp.cache, hash)
if not lists then
statistics.starttiming(flusher)
-- we can use a format per font
@@ -137,7 +137,7 @@ do
}
end
metapost.reset(mpxformat) -- saves memory
- lists = containers.write(fonts.mp.cache(), hash, lists)
+ lists = containers.write(fonts.mp.cache, hash, lists)
statistics.stoptiming(flusher)
end
variants = variants + #lists
diff --git a/tex/context/base/m-timing.tex b/tex/context/base/m-timing.tex
index f02a90087..55185b0b2 100644
--- a/tex/context/base/m-timing.tex
+++ b/tex/context/base/m-timing.tex
@@ -36,7 +36,7 @@
\ctxloadluafile{trac-tim}{}
\startluacode
-local progress = plugins.progress
+local progress = moduledata.progress
function progress.show(filename,parameters,nodes,other)
for n, name in pairs(parameters or progress.parameters(filename)) do
@@ -51,16 +51,16 @@ end
% \everyfirstshipout
\startnotmode[no-timing]
- \appendtoks\ctxlua{plugins.progress.store()}\to\everystarttext
- \appendtoks\ctxlua{plugins.progress.store()}\to\everyshipout
- \ctxlua{main.register_stop_actions(function() plugins.progress.save() end)}
+ \appendtoks\ctxlua{moduledata.progress.store()}\to\everystarttext
+ \appendtoks\ctxlua{moduledata.progress.store()}\to\everyshipout
+ \ctxlua{luatex.register_stop_actions(function() moduledata.progress.save() end)}
\stopnotmode
\def\ShowNamedUsage#1#2#3%
{\setbox\scratchbox\vbox\bgroup\startMPcode
begingroup ; save p, q, b, h, w ;
path p, q, b ; numeric h, w ;
- p := \ctxlua{tex.sprint(plugins.progress.path("#1","#2"))} ;
+ p := \ctxlua{tex.sprint(moduledata.progress.path("#1","#2"))} ;
% p := p shifted -llcorner p ;
if bbwidth(p) > 1 :
h := 100 ; w := 2 * h ;
@@ -71,7 +71,7 @@ end
draw b withcolor \MPcolor{usage:frame} ;
draw p withcolor \MPcolor{usage:line} ;
if ("#3" <> "") and ("#3" <> "#2") :
- q := \ctxlua{tex.sprint(plugins.progress.path("#1","#3"))} ;
+ q := \ctxlua{tex.sprint(moduledata.progress.path("#1","#3"))} ;
% q := q shifted -llcorner q ;
if bbwidth(q) > 1 :
q := q xstretched w ;
@@ -87,16 +87,16 @@ end
\startlinecorrection
\box\scratchbox \endgraf
\hbox to \scratchdimen{\tttf\strut\detokenize{#2}\hss
- min:\ctxlua{tex.sprint(plugins.progress.bot("#1","\detokenize{#2}"))}, %
- max:\ctxlua{tex.sprint(plugins.progress.top("#1","\detokenize{#2}"))}, %
- pages:\ctxlua{tex.sprint(plugins.progress.pages("#1"))}%
+ min:\ctxlua{tex.sprint(moduledata.progress.bot("#1","\detokenize{#2}"))}, %
+ max:\ctxlua{tex.sprint(moduledata.progress.top("#1","\detokenize{#2}"))}, %
+ pages:\ctxlua{tex.sprint(moduledata.progress.pages("#1"))}%
}%
\stoplinecorrection
\fi}
-\def\LoadUsage #1{\ctxlua{plugins.progress.convert("#1")}}
-\def\ShowUsage #1{\ctxlua{plugins.progress.show("#1",nil,nil,"elapsed_time")}}
-\def\ShowMemoryUsage#1{\ctxlua{plugins.progress.show("#1",nil,{}, "elapsed_time")}}
-\def\ShowNodeUsage #1{\ctxlua{plugins.progress.show("#1",{},nil, "elapsed_time")}}
+\def\LoadUsage #1{\ctxlua{moduledata.progress.convert("#1")}}
+\def\ShowUsage #1{\ctxlua{moduledata.progress.show("#1",nil,nil,"elapsed_time")}}
+\def\ShowMemoryUsage#1{\ctxlua{moduledata.progress.show("#1",nil,{}, "elapsed_time")}}
+\def\ShowNodeUsage #1{\ctxlua{moduledata.progress.show("#1",{},nil, "elapsed_time")}}
\endinput
diff --git a/tex/context/base/math-def.mkiv b/tex/context/base/math-def.mkiv
index 50c9902dd..af7166f80 100644
--- a/tex/context/base/math-def.mkiv
+++ b/tex/context/base/math-def.mkiv
@@ -113,6 +113,8 @@
\def\plainbigdelimiters % traditional method
{\chardef\bigmathdelimitermethod\plustwo}
+\plainbigdelimiters % is default for the moment but not so nice
+
\def\doplainbigmath#1#2%
{{\hbox{$%
\nulldelimiterspace\zeropoint\relax
diff --git a/tex/context/base/math-dim.lua b/tex/context/base/math-dim.lua
index 62d805126..fbaffe4fb 100644
--- a/tex/context/base/math-dim.lua
+++ b/tex/context/base/math-dim.lua
@@ -249,7 +249,6 @@ function mathematics.dimensions(dimens)
end
t[variable] = tt
end
---~ logs.report("warning", "version 0.47 is needed for proper delimited math")
local d = {
AxisHeight = t . axis . text_style,
AccentBaseHeight = t . accent_base_height . text_style,
diff --git a/tex/context/base/math-ent.lua b/tex/context/base/math-ent.lua
index e5e5b98f0..d387f9ee5 100644
--- a/tex/context/base/math-ent.lua
+++ b/tex/context/base/math-ent.lua
@@ -5,6 +5,8 @@ if not modules then modules = { } end modules ['math-ent'] = {
copyright = "derived from the mathml 2.0 specification",
}
+-- this might go into char-def
+
mathematics.entities={
["Aacute"]=0x000C1,
["aacute"]=0x000E1,
diff --git a/tex/context/base/math-ext.lua b/tex/context/base/math-ext.lua
index 673103677..32d0263d9 100644
--- a/tex/context/base/math-ext.lua
+++ b/tex/context/base/math-ext.lua
@@ -11,6 +11,8 @@ local trace_virtual = false trackers.register("math.virtual", function(v) trace_
mathematics = mathematics or { }
characters = characters or { }
+local report_math = logs.new("mathematics")
+
mathematics.extras = mathematics.extras or { }
characters.math = characters.math or { }
@@ -22,7 +24,7 @@ function mathematics.extras.add(unicode,t)
if unicode >= min and unicode <= max then
mathdata[unicode], chardata[unicode] = t, t
else
- logs.report("math extra","extra U+%04X should be in range U+%04X - U+%04X",unicode,min,max)
+ report_math("extra U+%04X should be in range U+%04X - U+%04X",unicode,min,max)
end
end
@@ -45,7 +47,7 @@ function mathematics.extras.copy(tfmdata)
local nextchar = characters[nextnext]
if nextchar then
if trace_virtual then
- logs.report("math extra","extra U+%04X in %s at %s maps on U+%04X (class: %s, name: %s)",unicode,file.basename(tfmdata.fullname),tfmdata.size,nextslot,extradesc.mathclass or "?",extradesc.mathname or "?")
+ report_math("extra U+%04X in %s at %s maps on U+%04X (class: %s, name: %s)",unicode,file.basename(tfmdata.fullname),tfmdata.size,nextslot,extradesc.mathclass or "?",extradesc.mathname or "?")
end
characters[unicode] = nextchar
break
@@ -53,11 +55,12 @@ function mathematics.extras.copy(tfmdata)
end
end
end
- if not characters[unicode] then
+ if not characters[unicode] then -- can be set in previous loop
for i=1,#nextinsize do
- local nextbase = characters[nextinsize[i]]
+ local nextslot = nextinsize[i]
+ local nextbase = characters[nextslot]
if nextbase then
- characters[unicode] = nextchar
+ characters[unicode] = nextbase -- still ok?
break
end
end
@@ -126,6 +129,16 @@ mathematics.extras.add(0xFE323, {
unicodeslot=0xFE323,
} )
+mathematics.extras.add(0xFE324, {
+ category="sm",
+ description="MATHEMATICAL SHORT BAR MIRRORED",
+-- direction="on",
+-- linebreak="nu",
+ mathclass="relation",
+ mathname="mapsfromchar",
+ unicodeslot=0xFE324,
+} )
+
--~ mathematics.extras.add(0xFE304, {
--~ category="sm",
--~ description="TOP AND BOTTOM PARENTHESES",
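
mathematics.extras.add above only accepts slots inside the private extrabase range and reports an out-of-range request via the new reporter. A compact sketch of that guarded-registration idea follows; the bounds are illustrative, the real ones derive from mathematics.extrabase.

    -- compact sketch of range-guarded registration as done in mathematics.extras.add
    local min, max = 0xFE000, 0xFEFFF -- illustrative private range
    local registry = { }

    local function add_extra(unicode, t)
        if unicode >= min and unicode <= max then
            registry[unicode] = t
        else
            print(string.format("extra U+%04X should be in range U+%04X - U+%04X", unicode, min, max))
        end
    end

    add_extra(0xFE324, { mathclass = "relation", mathname = "mapsfromchar" }) -- accepted
    add_extra(0x2190,  { mathclass = "relation" })                            -- rejected, reported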
diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua
index 63d7cad38..b5927f998 100644
--- a/tex/context/base/math-ini.lua
+++ b/tex/context/base/math-ini.lua
@@ -15,6 +15,8 @@ local texsprint, format, utfchar, utfbyte = tex.sprint, string.format, utf.char,
local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
+local report_math = logs.new("mathematics")
+
mathematics = mathematics or { }
mathematics.extrabase = 0xFE000 -- here we push some virtuals
@@ -154,11 +156,11 @@ end
local function report(class,family,unicode,name)
local nametype = type(name)
if nametype == "string" then
- logs.report("mathematics","%s:%s %s U+%05X (%s) => %s",classname,class,family,unicode,utfchar(unicode),name)
+ report_math("%s:%s %s U+%05X (%s) => %s",classname,class,family,unicode,utfchar(unicode),name)
elseif nametype == "number" then
- logs.report("mathematics","%s:%s %s U+%05X (%s) => U+%05X",classname,class,family,unicode,utfchar(unicode),name)
+ report_math("%s:%s %s U+%05X (%s) => U+%05X",classname,class,family,unicode,utfchar(unicode),name)
else
- logs.report("mathematics","%s:%s %s U+%05X (%s)", classname,class,family,unicode,utfchar(unicode))
+ report_math("%s:%s %s U+%05X (%s)", classname,class,family,unicode,utfchar(unicode))
end
end
diff --git a/tex/context/base/math-ini.mkiv b/tex/context/base/math-ini.mkiv
index 828a6eccb..8feeae432 100644
--- a/tex/context/base/math-ini.mkiv
+++ b/tex/context/base/math-ini.mkiv
@@ -250,10 +250,10 @@
{\normalhbox\bgroup\mf
\dowithnextbox{\flushnextbox\egroup}\normalhbox}
-\def\mbox
+\def\mbox % we cannot add \dontleavehmode ... else no \setbox0\mbox possible
{\ifmmode\normalmbox\else\normalhbox\fi}
-\def\enablembox
+\unexpanded\def\enablembox
{\appendtoks
\ifx\normalhbox\undefined\let\normalhbox\hbox\fi
\let\hbox\mbox
diff --git a/tex/context/base/math-int.mkiv b/tex/context/base/math-int.mkiv
index 2af471b5c..9bb7b1a14 100644
--- a/tex/context/base/math-int.mkiv
+++ b/tex/context/base/math-int.mkiv
@@ -53,22 +53,45 @@
%D More integrals (AM):
-\definemathcommand [iint] {\repeatintegral\plusone }
-\definemathcommand [iiint] {\repeatintegral\plustwo }
-\definemathcommand [iiiint] {\repeatintegral\plusthree}
-
%def\integralrepeatsymbol{\intop}
\def\integralrepeatsymbol{{\int}}
-\def\repeatintegral#1%
+% \def\repeatintegral#1%
+% {\scratchtoks\emptytoks
+% \let\dointlimits\donothing
+% \let\dodointlimits\intlimits
+% \dorecurse{#1}{\appendtoks \integralrepeatsymbol \dointkern \to \scratchtoks}%
+% \appendtoks \intop \dointlimits \dodointlimits \to \scratchtoks
+% \edef\dodorepeatintegral{\the\scratchtoks}%
+% \futurelet\next\dorepeatintegral}
+
+% \definemathcommand [iint] {\repeatintegral\plusone }
+% \definemathcommand [iiint] {\repeatintegral\plustwo }
+% \definemathcommand [iiiint] {\repeatintegral\plusthree}
+
+\def\fakerepeatintegral#1%
{\scratchtoks\emptytoks
- \let\dointlimits\donothing
- \let\dodointlimits\intlimits
- \dorecurse{#1}{\appendtoks \integralrepeatsymbol \dointkern \to \scratchtoks}
+ \dorecurse{#1}{\appendtoks \integralrepeatsymbol \dointkern \to \scratchtoks}%
\appendtoks \intop \dointlimits \dodointlimits \to \scratchtoks
- \edef\dodorepeatintegral{\the\scratchtoks}%
+ \edef\dodorepeatintegral{\the\scratchtoks}}
+
+\def\repeatintegral#1#2#3%
+ {\let\dointlimits\donothing
+ \let\dodointlimits\intlimits
+ \iffontchar\textfont\zerocount#1\relax
+ %\edef\dodorepeatintegral{\utfchar{#1}}%
+ \let\dodorepeatintegral#2%
+ \else
+ \fakerepeatintegral{#3}%
+ \fi
\futurelet\next\dorepeatintegral}
+% This is a temporary solution, as we will make a virtual glyph in lm.
+
+\definemathcommand [iint] {\repeatintegral{"222B}\normaliint \plusone }
+\definemathcommand [iiint] {\repeatintegral{"222C}\normaliiint \plustwo }
+\definemathcommand [iiiint] {\repeatintegral{"222D}\normaliiiint\plusthree}
+
%D If the \type{\limits} option is used after \type{\iint}, use
%D \type{\mathop} and fudge the left hand space a bit to make the
%D subscript visually centered.
diff --git a/tex/context/base/math-map.lua b/tex/context/base/math-map.lua
index 2d34dc1c3..b6a12bf31 100644
--- a/tex/context/base/math-map.lua
+++ b/tex/context/base/math-map.lua
@@ -26,6 +26,8 @@ local texattribute = tex.attribute
local trace_greek = false trackers.register("math.greek", function(v) trace_greek = v end)
+local report_math = logs.new("mathematics")
+
mathematics = mathematics or { }
-- we could use one level less and have tf etc be tables directly but the
@@ -400,7 +402,7 @@ function mathematics.remap_alphabets(char,mathalphabet,mathgreek)
local alphabet = r and r.alphabet or "regular"
local style = r and r.style or "tf"
if trace_greek then
- logs.report("math","before: char: %05X, alphabet: %s %s, lcgreek: %s, ucgreek: %s",char,alphabet,style,remapping[lc].what,remapping[uc].what)
+ report_math("before: char: %05X, alphabet: %s %s, lcgreek: %s, ucgreek: %s",char,alphabet,style,remapping[lc].what,remapping[uc].what)
end
local s = remapping[islc or isuc][style]
if s then
@@ -408,7 +410,7 @@ function mathematics.remap_alphabets(char,mathalphabet,mathgreek)
mathalphabet, style = data and data.attribute or mathalphabet, s
end
if trace_greek then
- logs.report("math","after : char: %05X, alphabet: %s %s, lcgreek: %s, ucgreek: %s",char,alphabet,style,remapping[lc].what,remapping[uc].what)
+ report_math("after : char: %05X, alphabet: %s %s, lcgreek: %s, ucgreek: %s",char,alphabet,style,remapping[lc].what,remapping[uc].what)
end
end
end
@@ -420,13 +422,13 @@ function mathematics.remap_alphabets(char,mathalphabet,mathgreek)
-- nothing to remap
elseif char >= 0x030 and char <= 0x039 then
local o = offset.digits
- newchar = (type(o) == "table" and (o[char] or char)) or (char - 0x030 + o)
+ newchar = o and ((type(o) == "table" and (o[char] or char)) or (char - 0x030 + o))
elseif char >= 0x041 and char <= 0x05A then
local o = offset.ucletters
- newchar = (type(o) == "table" and (o[char] or char)) or (char - 0x041 + o)
+ newchar = o and ((type(o) == "table" and (o[char] or char)) or (char - 0x041 + o))
elseif char >= 0x061 and char <= 0x07A then
local o = offset.lcletters
- newchar = (type(o) == "table" and (o[char] or char)) or (char - 0x061 + o)
+ newchar = o and ((type(o) == "table" and (o[char] or char)) or (char - 0x061 + o))
elseif islcgreek[char] then
newchar = offset.lcgreek[char]
elseif isucgreek[char] then
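
The change in remap_alphabets above only adds a guard against a missing offset table: when no offset is defined for digits or letters, newchar stays nil instead of triggering an arithmetic error on nil. A reduced sketch of that guarded remap for the digit range:

    -- reduced sketch of the guarded digit remapping in math-map.lua
    local bolddigits = 0x1D7CE -- MATHEMATICAL BOLD DIGIT ZERO, used as a sample offset

    local function remap_digit(char, offset)
        if char >= 0x30 and char <= 0x39 and offset then
            if type(offset) == "table" then
                return offset[char] or char
            else
                return char - 0x30 + offset
            end
        end
        return nil -- no offset defined (or not a digit): caller keeps the original char
    end

    print(string.format("%X", remap_digit(0x35, bolddigits) or 0x35)) --> 1D7D3
    print(string.format("%X", remap_digit(0x35, nil)        or 0x35)) --> 35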
diff --git a/tex/context/base/math-noa.lua b/tex/context/base/math-noa.lua
index 02bbe0a62..20f7e5d12 100644
--- a/tex/context/base/math-noa.lua
+++ b/tex/context/base/math-noa.lua
@@ -27,6 +27,8 @@ local trace_remapping = false trackers.register("math.remapping", function(v)
local trace_processing = false trackers.register("math.processing", function(v) trace_processing = v end)
local trace_analyzing = false trackers.register("math.analyzing", function(v) trace_analyzing = v end)
+local report_noads = logs.new("noads")
+
local noad_ord = 0
local noad_op_displaylimits = 1
local noad_op_limits = 2
@@ -85,50 +87,53 @@ local all_noads = {
noads.processors = noads.processors or { }
-local function process(start,what,n)
+local function process(start,what,n,parent)
if n then n = n + 1 else n = 0 end
while start do
if trace_processing then
- logs.report("math","%s%s",rep(" ",n or 0),tostring(start))
+ report_noads("%s%s",rep(" ",n or 0),tostring(start))
end
local id = start.id
local proc = what[id]
if proc then
- proc(start,what,n)
+ local done, newstart = proc(start,what,n,parent or start.prev)
+ if newstart then
+ start = newstart
+ end
elseif id == math_char or id == math_text_char or id == math_delim then
break
elseif id == math_style then
-- has a next
elseif id == math_noad then
- local noad = start.nucleus if noad then process(noad,what,n) end -- list
- noad = start.sup if noad then process(noad,what,n) end -- list
- noad = start.sub if noad then process(noad,what,n) end -- list
+ local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
+ noad = start.sup if noad then process(noad,what,n,start) end -- list
+ noad = start.sub if noad then process(noad,what,n,start) end -- list
elseif id == math_box or id == math_sub then
- local noad = start.list if noad then process(noad,what,n) end -- list
+ local noad = start.list if noad then process(noad,what,n,start) end -- list
elseif id == math_fraction then
- local noad = start.num if noad then process(noad,what,n) end -- list
- noad = start.denom if noad then process(noad,what,n) end -- list
- noad = start.left if noad then process(noad,what,n) end -- delimiter
- noad = start.right if noad then process(noad,what,n) end -- delimiter
+ local noad = start.num if noad then process(noad,what,n,start) end -- list
+ noad = start.denom if noad then process(noad,what,n,start) end -- list
+ noad = start.left if noad then process(noad,what,n,start) end -- delimiter
+ noad = start.right if noad then process(noad,what,n,start) end -- delimiter
elseif id == math_choice then
- local noad = start.display if noad then process(noad,what,n) end -- list
- noad = start.text if noad then process(noad,what,n) end -- list
- noad = start.script if noad then process(noad,what,n) end -- list
- noad = start.scriptscript if noad then process(noad,what,n) end -- list
+ local noad = start.display if noad then process(noad,what,n,start) end -- list
+ noad = start.text if noad then process(noad,what,n,start) end -- list
+ noad = start.script if noad then process(noad,what,n,start) end -- list
+ noad = start.scriptscript if noad then process(noad,what,n,start) end -- list
elseif id == math_fence then
- local noad = start.delim if noad then process(noad,what,n) end -- delimiter
+ local noad = start.delim if noad then process(noad,what,n,start) end -- delimiter
elseif id == math_radical then
- local noad = start.nucleus if noad then process(noad,what,n) end -- list
- noad = start.sup if noad then process(noad,what,n) end -- list
- noad = start.sub if noad then process(noad,what,n) end -- list
- noad = start.left if noad then process(noad,what,n) end -- delimiter
- noad = start.degree if noad then process(noad,what,n) end -- list
+ local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
+ noad = start.sup if noad then process(noad,what,n,start) end -- list
+ noad = start.sub if noad then process(noad,what,n,start) end -- list
+ noad = start.left if noad then process(noad,what,n,start) end -- delimiter
+ noad = start.degree if noad then process(noad,what,n,start) end -- list
elseif id == math_accent then
- local noad = start.nucleus if noad then process(noad,what,n) end -- list
- noad = start.sup if noad then process(noad,what,n) end -- list
- noad = start.sub if noad then process(noad,what,n) end -- list
- noad = start.accent if noad then process(noad,what,n) end -- list
- noad = start.bot_accent if noad then process(noad,what,n) end -- list
+ local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
+ noad = start.sup if noad then process(noad,what,n,start) end -- list
+ noad = start.sub if noad then process(noad,what,n,start) end -- list
+ noad = start.accent if noad then process(noad,what,n,start) end -- list
+ noad = start.bot_accent if noad then process(noad,what,n,start) end -- list
else
-- glue, penalty, etc
end
@@ -146,7 +151,7 @@ local mathgreek = attributes.private("mathgreek")
noads.processors.relocate = { }
local function report_remap(tag,id,old,new,extra)
- logs.report("math","remapping %s in font %s from U+%04X (%s) to U+%04X (%s)%s",tag,id,old,utfchar(old),new,utfchar(new),extra or "")
+ report_noads("remapping %s in font %s from U+%04X (%s) to U+%04X (%s)%s",tag,id,old,utfchar(old),new,utfchar(new),extra or "")
end
local remap_alphabets = mathematics.remap_alphabets
@@ -247,9 +252,9 @@ end
local mathsize = attributes.private("mathsize")
-noads.processors.resize = { }
+local resize = { } noads.processors.resize = resize
-noads.processors.resize[math_fence] = function(pointer)
+resize[math_fence] = function(pointer)
if pointer.subtype == 1 then -- left
local a = has_attribute(pointer,mathsize)
if a and a > 0 then
@@ -266,7 +271,7 @@ noads.processors.resize[math_fence] = function(pointer)
end
function noads.resize_characters(head,style,penalties)
- process(head,noads.processors.resize)
+ process(head,resize)
return true
end
@@ -274,13 +279,13 @@ end
local mathpunctuation = attributes.private("mathpunctuation")
-noads.processors.respace = { }
+local respace = { } noads.processors.respace = respace
local chardata = characters.data
-- only [nd,ll,ul][po][nd,ll,ul]
-noads.processors.respace[math_noad] = function(pointer)
+respace[math_noad] = function(pointer)
if pointer.subtype == noad_ord then
local a = has_attribute(pointer,mathpunctuation)
if a and a > 0 then
@@ -327,9 +332,8 @@ noads.processors.respace[math_noad] = function(pointer)
end
end
-
function noads.respace_characters(head,style,penalties)
- noads.process(head,noads.processors.respace)
+ process(head,respace)
return true
end
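
The process() rework above threads the parent noad into every handler and lets a handler return a (done, newstart) pair so traversal can resume at a replacement node. A self-contained sketch of a handler obeying that protocol; math_char stands in for the node id defined earlier in the file and the fake node table exists only for the demonstration:

-- mock of the (done, newstart) protocol that the new process() honours
local math_char = 1                          -- stand-in for node.id("math_char")

local handlers = { }

handlers[math_char] = function(pointer, what, n, parent)
    -- a handler may edit the list and return a node to resume traversal at;
    -- returning nil as newstart means "continue after pointer as before"
    local done, newstart = false, nil
    if pointer.char == 0x002D then
        pointer.char = 0x02212               -- e.g. promote a hyphen to a real minus
        done = true
    end
    return done, newstart
end

local fakenode = { id = math_char, char = 0x002D }
local done = handlers[fakenode.id](fakenode, handlers, 0, nil)
print(done, string.format("U+%04X", fakenode.char))   -- true    U+2212
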
diff --git a/tex/context/base/math-vfu.lua b/tex/context/base/math-vfu.lua
index 5023e6b4d..dccb30c92 100644
--- a/tex/context/base/math-vfu.lua
+++ b/tex/context/base/math-vfu.lua
@@ -11,10 +11,13 @@ if not modules then modules = { } end modules ['math-vfu'] = {
-- characters report it to the ConTeXt mailing list.
local type, next = type, next
+local max = math.max
local trace_virtual = false trackers.register("math.virtual", function(v) trace_virtual = v end)
local trace_timings = false trackers.register("math.timings", function(v) trace_timings = v end)
+local report_virtual = logs.new("virtual math")
+
fonts.enc.math = fonts.enc.math or { }
local shared = { }
@@ -69,20 +72,19 @@ local function brace(main,characters,id,size,unicode,first,rule,left,right,rule,
end
local function arrow(main,characters,id,size,unicode,arrow,minus,isleft)
- if characters[unicode] then
- if isleft then
- t = {
- { extender = 0, glyph = arrow },
- { extender = 1, glyph = minus },
- }
- else
- t = {
- { extender = 0, glyph = minus },
- { extender = 1, glyph = arrow },
- }
- end
- --~ main.characters[unicode] = { horiz_variants = t }
- characters[unicode].horiz_variants = t
+ local chr = characters[unicode]
+ if not chr then
+ -- skip
+ elseif isleft then
+ chr.horiz_variants = {
+ { extender = 0, glyph = arrow },
+ { extender = 1, glyph = minus },
+ }
+ else
+ chr.horiz_variants = {
+ { extender = 0, glyph = minus },
+ { extender = 1, glyph = arrow },
+ }
end
end
@@ -226,32 +228,107 @@ local function vertbar(main,characters,id,size,parent,scale,unicode)
end
end
+local function jointwo(main,characters,id,size,unicode,u1,d12,u2)
+ local c1, c2 = characters[u1], characters[u2]
+ if c1 and c2 then
+ local w1, w2 = c1.width, c2.width
+ local mu = size/18
+ characters[unicode] = {
+ width = w1 + w2 - d12*mu,
+ height = max(c1.height or 0, c2.height or 0),
+ depth = max(c1.depth or 0, c2.depth or 0),
+ commands = {
+ { "slot", id, u1 },
+ { "right", -d12*mu } ,
+ { "slot", id, u2 },
+ }
+ }
+ end
+end
+
+local function jointhree(main,characters,id,size,unicode,u1,d12,u2,d23,u3)
+ local c1, c2, c3 = characters[u1], characters[u2], characters[u3]
+ if c1 and c2 and c3 then
+ local w1, w2, w3 = c1.width, c2.width, c3.width
+ local mu = size/18
+ characters[unicode] = {
+ width = w1 + w2 + w3 - d12*mu - d23*mu,
+ height = max(c1.height or 0, c2.height or 0, c3.height or 0),
+ depth = max(c1.depth or 0, c2.depth or 0, c3.depth or 0),
+ commands = {
+ { "slot", id, u1 },
+ { "right", - d12*mu } ,
+ { "slot", id, u2 },
+ { "right", - d23*mu },
+ { "slot", id, u3 },
+ }
+ }
+ end
+end
+
+local function stack(main,characters,id,size,unicode,u1,d12,u2)
+ local c1, c2 = characters[u1], characters[u2]
+ if c1 and c2 then
+ local w1, w2 = c1.width, c2.width
+ local h1, h2 = c1.height, c2.height
+ local d1, d2 = c1.depth, c2.depth
+ local mu = size/18
+ characters[unicode] = {
+ width = w1,
+ height = h1 + h2 + d12,
+ depth = d1,
+ commands = {
+ { "slot", id, u1 },
+ { "right", - w1/2 - w2/2 } ,
+ { "down", -h1 + d2 -d12*mu } ,
+ { "slot", id, u2 },
+ }
+ }
+ end
+end
+
function fonts.vf.math.alas(main,id,size)
local characters = main.characters
for i=0x7A,0x7D do
make(main,characters,id,size,i,1)
end
- brace (main,characters,id,size,0x23DE,0xFF17A,0xFF301,0xFF17D,0xFF17C,0xFF301,0xFF17B)
- brace (main,characters,id,size,0x23DF,0xFF27C,0xFF401,0xFF27B,0xFF27A,0xFF401,0xFF27D)
- parent (main,characters,id,size,0x23DC,0xFF17A,0xFF301,0xFF17B)
- parent (main,characters,id,size,0x23DD,0xFF27C,0xFF401,0xFF27D)
- negate (main,characters,id,size,0x2260,0x003D)
- dots (main,characters,id,size,0x2026) -- ldots
- dots (main,characters,id,size,0x22EE) -- vdots
- dots (main,characters,id,size,0x22EF) -- cdots
- dots (main,characters,id,size,0x22F1) -- ddots
- dots (main,characters,id,size,0x22F0) -- udots
- minus (main,characters,id,size,0xFF501)
- arrow (main,characters,id,size,0x2190,0xFE190,0xFF501,true) -- left
- arrow (main,characters,id,size,0x2192,0xFE192,0xFF501,false) -- right
- vertbar(main,characters,id,size,0x0007C,0.10,0xFF601) -- big : 0.85 bodyfontsize
- vertbar(main,characters,id,size,0xFF601,0.30,0xFF602) -- Big : 1.15 bodyfontsize
- vertbar(main,characters,id,size,0xFF602,0.30,0xFF603) -- bigg : 1.45 bodyfontsize
- vertbar(main,characters,id,size,0xFF603,0.30,0xFF604) -- Bigg : 1.75 bodyfontsize
- vertbar(main,characters,id,size,0x02225,0.10,0xFF605)
- vertbar(main,characters,id,size,0xFF605,0.30,0xFF606)
- vertbar(main,characters,id,size,0xFF606,0.30,0xFF607)
- vertbar(main,characters,id,size,0xFF607,0.30,0xFF608)
+ brace (main,characters,id,size,0x23DE,0xFF17A,0xFF301,0xFF17D,0xFF17C,0xFF301,0xFF17B)
+ brace (main,characters,id,size,0x23DF,0xFF27C,0xFF401,0xFF27B,0xFF27A,0xFF401,0xFF27D)
+ parent (main,characters,id,size,0x23DC,0xFF17A,0xFF301,0xFF17B)
+ parent (main,characters,id,size,0x23DD,0xFF27C,0xFF401,0xFF27D)
+ negate (main,characters,id,size,0x2260,0x003D)
+ dots (main,characters,id,size,0x2026) -- ldots
+ dots (main,characters,id,size,0x22EE) -- vdots
+ dots (main,characters,id,size,0x22EF) -- cdots
+ dots (main,characters,id,size,0x22F1) -- ddots
+ dots (main,characters,id,size,0x22F0) -- udots
+ minus (main,characters,id,size,0xFF501)
+ arrow (main,characters,id,size,0x2190,0xFE190,0xFF501,true) -- left
+ arrow (main,characters,id,size,0x2192,0xFE192,0xFF501,false) -- right
+ vertbar (main,characters,id,size,0x0007C,0.10,0xFF601) -- big : 0.85 bodyfontsize
+ vertbar (main,characters,id,size,0xFF601,0.30,0xFF602) -- Big : 1.15 bodyfontsize
+ vertbar (main,characters,id,size,0xFF602,0.30,0xFF603) -- bigg : 1.45 bodyfontsize
+ vertbar (main,characters,id,size,0xFF603,0.30,0xFF604) -- Bigg : 1.75 bodyfontsize
+ vertbar (main,characters,id,size,0x02016,0.10,0xFF605)
+ vertbar (main,characters,id,size,0xFF605,0.30,0xFF606)
+ vertbar (main,characters,id,size,0xFF606,0.30,0xFF607)
+ vertbar (main,characters,id,size,0xFF607,0.30,0xFF608)
+ jointwo (main,characters,id,size,0x21A6,0xFE321,0,0x02192) -- \mapstochar\rightarrow
+ jointwo (main,characters,id,size,0x21A9,0x02190,3,0xFE323) -- \leftarrow\joinrel\rhook
+ jointwo (main,characters,id,size,0x21AA,0xFE322,3,0x02192) -- \lhook\joinrel\rightarrow
+ stack (main,characters,id,size,0x2259,0x0003D,3,0x02227) -- \buildrel\wedge\over=
+ jointwo (main,characters,id,size,0x22C8,0x022B3,4,0x022B2) -- \mathrel\triangleright\joinrel\mathrel\triangleleft (4 looks better than 3)
+ jointwo (main,characters,id,size,0x2284,0x00338,0,0x02282) -- \not\subset
+ jointwo (main,characters,id,size,0x2285,0x00338,0,0x02283) -- \not\supset
+ jointwo (main,characters,id,size,0x22A7,0x0007C,3,0x0003D) -- \mathrel|\joinrel=
+ jointwo (main,characters,id,size,0x27F5,0x02190,3,0x0002D) -- \leftarrow\joinrel\relbar
+ jointwo (main,characters,id,size,0x27F6,0x0002D,3,0x02192) -- \relbar\joinrel\rightarrow
+ jointwo (main,characters,id,size,0x27F7,0x02190,3,0x02192) -- \leftarrow\joinrel\rightarrow
+ jointwo (main,characters,id,size,0x27F8,0x021D0,3,0x0003D) -- \Leftarrow\joinrel\Relbar
+ jointwo (main,characters,id,size,0x27F9,0x0003D,3,0x021D2) -- \Relbar\joinrel\Rightarrow
+ jointwo (main,characters,id,size,0x27FA,0x021D0,3,0x021D2) -- \Leftarrow\joinrel\Rightarrow
+ jointhree(main,characters,id,size,0x27FB,0x02190,3,0x0002D,0,0xFE324) -- \leftarrow\joinrel\relbar\mapsfromchar
+ jointhree(main,characters,id,size,0x27FC,0xFE321,0,0x0002D,3,0x02192) -- \mapstochar\relbar\joinrel\rightarrow
end
local unique = 0 -- testcase: \startTEXpage \math{!\text{-}\text{-}\text{-}} \stopTEXpage
@@ -268,7 +345,7 @@ function fonts.basecopy(tfmtable,name)
end
t.characters = c
else
- logs.report("math virtual","font %s has no characters",name)
+ report_virtual("font %s has no characters",name)
end
if parameters then
for k, v in next, parameters do
@@ -276,7 +353,7 @@ function fonts.basecopy(tfmtable,name)
end
t.parameters = p
else
- logs.report("math virtual","font %s has no parameters",name)
+ report_virtual("font %s has no parameters",name)
end
-- tricky ... what if fullname does not exist
if fullname then
@@ -310,14 +387,14 @@ function fonts.vf.math.define(specification,set)
local ssname = ss.name
if ss.optional and fonts.vf.math.optional then
if trace_virtual then
- logs.report("math virtual","loading font %s subfont %s with name %s at %s is skipped",name,s,ssname,size)
+ report_virtual("loading font %s subfont %s with name %s at %s is skipped",name,s,ssname,size)
end
else
if ss.features then ssname = ssname .. "*" .. ss.features end
if ss.main then main = s end
local f, id = fonts.tfm.read_and_define(ssname,size)
if not f then
- logs.report("math virtual","loading font %s subfont %s with name %s at %s is skipped, not found",name,s,ssname,size)
+ report_virtual("loading font %s subfont %s with name %s at %s is skipped, not found",name,s,ssname,size)
else
n = n + 1
okset[n] = ss
@@ -325,7 +402,30 @@ function fonts.vf.math.define(specification,set)
lst[n] = { id = id, size = size }
if not shared[s] then shared[n] = { } end
if trace_virtual then
- logs.report("math virtual","loading font %s subfont %s with name %s at %s as id %s using encoding %s",name,s,ssname,size,id,ss.vector or "none")
+ report_virtual("loading font %s subfont %s with name %s at %s as id %s using encoding %s",name,s,ssname,size,id,ss.vector or "none")
+ end
+ if not ss.checked then
+ ss.checked = true
+ local vector = fonts.enc.math[ss.vector]
+ if vector then
+ -- we resolve named glyphs only once as we can assume that vectors
+ -- are unique to a font set (when we read an afm we get those names
+ -- mapped onto the private area)
+ for unicode, index in next, vector do
+ if not tonumber(index) then
+ local u = f.unicodes
+ u = u and u[index]
+ if u then
+ if trace_virtual then
+ report_virtual("resolving name %s to %s",index,u)
+ end
+ else
+ report_virtual("unable to resolve name %s",index)
+ end
+ vector[unicode] = u
+ end
+ end
+ end
end
end
end
@@ -356,7 +456,7 @@ function fonts.vf.math.define(specification,set)
mm.big_op_spacing3 = fp[11] or 0 -- big_op_spacing3 minimum baselineskip above displayed op
mm.big_op_spacing4 = fp[12] or 0 -- big_op_spacing4 minimum baselineskip below displayed op
mm.big_op_spacing5 = fp[13] or 0 -- big_op_spacing5 padding above and below displayed limits
- -- logs.report("math virtual","loading and virtualizing font %s at size %s, setting ex parameters",name,size)
+ -- report_virtual("loading and virtualizing font %s at size %s, setting ex parameters",name,size)
elseif ss.parameters then
mp.x_height = fp.x_height or mp.x_height
mm.x_height = mm.x_height or fp.x_height or 0 -- x_height height of x
@@ -375,10 +475,10 @@ function fonts.vf.math.define(specification,set)
mm.delim1 = fp[20] or 0 -- delim1 size of \atopwithdelims delimiters in display styles
mm.delim2 = fp[21] or 0 -- delim2 size of \atopwithdelims delimiters in non-displays
mm.axis_height = fp[22] or 0 -- axis_height height of fraction lines above the baseline
- -- logs.report("math virtual","loading and virtualizing font %s at size %s, setting sy parameters",name,size)
+ -- report_virtual("loading and virtualizing font %s at size %s, setting sy parameters",name,size)
end
else
- logs.report("math virtual","font %s, no parameters set",name)
+ report_virtual("font %s, no parameters set",name)
end
local vectorname = ss.vector
if vectorname then
@@ -399,9 +499,9 @@ function fonts.vf.math.define(specification,set)
local ru = rv[unicode]
if not ru then
if trace_virtual then
- logs.report("math virtual", "unicode point U+%05X has no index %04X in vector %s for font %s",unicode,index,vectorname,fontname)
+ report_virtual( "unicode point U+%05X has no index %04X in vector %s for font %s",unicode,index,vectorname,fontname)
elseif not already_reported then
- logs.report("math virtual", "the mapping is incomplete for '%s' at %s",name,number.topoints(size))
+ report_virtual( "the mapping is incomplete for '%s' at %s",name,number.topoints(size))
already_reported = true
end
rv[unicode] = true
@@ -577,7 +677,7 @@ function fonts.vf.math.define(specification,set)
fonts.vf.math.alas(main,#lst,size)
end
if trace_virtual or trace_timings then
- logs.report("math virtual","loading and virtualizing font %s at size %s took %0.3f seconds",name,size,os.clock()-start)
+ report_virtual("loading and virtualizing font %s at size %s took %0.3f seconds",name,size,os.clock()-start)
end
main.has_italic = true
main.type = "virtual" -- not needed
@@ -1015,7 +1115,8 @@ fonts.enc.math["tex-sy"] = {
[0x027E8] = 0x68, -- <, langle
[0x027E9] = 0x69, -- >, rangle
[0x0007C] = 0x6A, -- |, mid, lvert, rvert
- [0x02225] = 0x6B, -- parallel, Vert, lVert, rVert, arrowvert
+ [0x02225] = 0x6B, -- parallel
+ -- [0x02016] = 0x00, -- Vert, lVert, rVert, arrowvert, Arrowvert
[0x02195] = 0x6C, -- updownarrow
[0x021D5] = 0x6D, -- Updownarrow
[0x0005C] = 0x6E, -- \, backslash, setminus
@@ -1304,6 +1405,11 @@ fonts.enc.math["tex-mb"] = {
[0x003F6] = 0x7F, -- epsiloninv \backepsilon
}
+fonts.enc.math["tex-mc"] = {
+ -- this file has no tfm so it gets mapped in the private space
+ [0xFE324] = "mapsfromchar",
+}
+
fonts.enc.math["tex-fraktur"] = {
-- [0x1D504] = 0x41, -- A (fraktur A)
-- [0x1D505] = 0x42, -- B
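
The new jointwo/jointhree/stack helpers compose virtual characters out of existing slots, pulling the pieces together with negative "right" kerns measured in math units (size/18). A standalone sketch of the jointwo arithmetic on fake character records; the design size, the widths and the dropped main argument are simplifications for the example:

-- illustrative only: the width/kern bookkeeping behind jointwo, on fake data
local max = math.max

local function jointwo(characters, id, size, unicode, u1, d12, u2)
    local c1, c2 = characters[u1], characters[u2]
    if c1 and c2 then
        local mu = size/18                   -- one math unit at this design size
        characters[unicode] = {
            width    = c1.width + c2.width - d12*mu,
            height   = max(c1.height or 0, c2.height or 0),
            depth    = max(c1.depth  or 0, c2.depth  or 0),
            commands = {
                { "slot", id, u1 },
                { "right", -d12*mu },        -- overlap the two pieces by d12 mu
                { "slot", id, u2 },
            },
        }
    end
end

local size = 10 * 65536                      -- hypothetical 10pt design size in scaled points
local characters = {
    [0x002D] = { width = 5*65536, height = 3*65536, depth = 0 },        -- relbar
    [0x2192] = { width = 7*65536, height = 4*65536, depth = 1*65536 },  -- rightarrow
}
jointwo(characters, 1, size, 0x27F6, 0x002D, 3, 0x2192)  -- long rightwards arrow
print(characters[0x27F6].width)              -- 5pt + 7pt - 3mu, in scaled points
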
diff --git a/tex/context/base/meta-ini.mkiv b/tex/context/base/meta-ini.mkiv
index 61acbca32..1072cb8f2 100644
--- a/tex/context/base/meta-ini.mkiv
+++ b/tex/context/base/meta-ini.mkiv
@@ -134,8 +134,6 @@
\def\endMPgraphicgroup
{\endgroup}
-\newconditional \METAFUNinitialized
-
\def\MPaskedfigure{false}
\def\currentMPinitializations
@@ -160,7 +158,6 @@
\def\dostopcurrentMPgraphic
{\global\MPinstancetoks\emptytoks
- \global\settrue\METAFUNinitialized % becomes obsolete
\endgroup}
\unexpanded\long\def\processMPgraphic#1% todo: extensions and inclusions outside beginfig
@@ -1210,7 +1207,7 @@
%D In any case we need to tell the converter what the inherited color
%D is to start with. Case~3 is kind of unpredictable as it closely
%D relates to the order in which paths are flushed. If you want to
-%Dinherit automatically from the surrounding, you can best stick to
+%D inherit automatically from the surrounding, you can best stick to
%D variant 1. Variant 0 (an isolated graphic) is the default.
%D
%D \startbuffer
diff --git a/tex/context/base/meta-pdf.lua b/tex/context/base/meta-pdf.lua
index 23f8d4de0..9a9b13028 100644
--- a/tex/context/base/meta-pdf.lua
+++ b/tex/context/base/meta-pdf.lua
@@ -15,6 +15,8 @@ local lpegmatch = lpeg.match
local round = math.round
local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
+local report_mptopdf = logs.new("mptopdf")
+
local pdfrgbcode = lpdf.rgbcode
local pdfcmykcode = lpdf.cmykcode
local pdfgraycode = lpdf.graycode
@@ -55,7 +57,7 @@ local function texcode(str)
texsprint(ctxcatcodes,str)
end
-function mpscode(str)
+local function mpscode(str)
if ignore_path then
pdfcode("h W n")
if extra_path_code then
@@ -304,9 +306,9 @@ end
-- not supported in mkiv , use mplib instead
-handlers[10] = function() logs.report("mptopdf","skipping special %s",10) end
-handlers[20] = function() logs.report("mptopdf","skipping special %s",20) end
-handlers[50] = function() logs.report("mptopdf","skipping special %s",50) end
+handlers[10] = function() report_mptopdf("skipping special %s",10) end
+handlers[20] = function() report_mptopdf("skipping special %s",20) end
+handlers[50] = function() report_mptopdf("skipping special %s",50) end
--end of not supported
@@ -320,7 +322,7 @@ function mps.setrgbcolor(r,g,b) -- extra check
if handler then
handler(s)
else
- logs.report("mptopdf","unknown special handler %s (1)",h)
+ report_mptopdf("unknown special handler %s (1)",h)
end
elseif r == 0.123 and g < 0.1 then
g, b = round(g*1000), round(b*1000)
@@ -330,7 +332,7 @@ function mps.setrgbcolor(r,g,b) -- extra check
if handler then
handler(s)
else
- logs.report("mptopdf","unknown special handler %s (2)",h)
+ report_mptopdf("unknown special handler %s (2)",h)
end
else
pdfcode(pdffinishtransparencycode())
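
Most of the Lua hunks in this patch follow one pattern: create a module-level reporter once with logs.new("category") and call it instead of repeating the category string in every logs.report call. A hedged sketch of that pattern; the fallback definition of logs.new is only an assumption so the snippet runs outside ConTeXt, where the real logging subsystem is not loaded:

-- sketch of the per-module reporter pattern used throughout this patch
logs = logs or {
    new = function(category)
        return function(fmt, ...)
            print(string.format("%-14s > " .. fmt, category, ...))
        end
    end
}

local report_mptopdf = logs.new("mptopdf")

report_mptopdf("skipping special %s", 10)
-- instead of: logs.report("mptopdf", "skipping special %s", 10)
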
diff --git a/tex/context/base/metatex.lus b/tex/context/base/metatex.lus
new file mode 100644
index 000000000..df7bc1914
--- /dev/null
+++ b/tex/context/base/metatex.lus
@@ -0,0 +1,9 @@
+if not modules then modules = { } end modules ['metatex'] = {
+ version = 1.001,
+ comment = "companion to metatex.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return "luat-cod.lua"
diff --git a/tex/context/base/metatex.tex b/tex/context/base/metatex.tex
index e90af709c..d99f75ead 100644
--- a/tex/context/base/metatex.tex
+++ b/tex/context/base/metatex.tex
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D We can experiment here with runtime loading, id est no longer
+%D We can experiment here with runtime loading, i.e. no longer
%D use a format. However, we still need a stub then but it could
%D as well be luatools (mtxrun) itself then.
@@ -51,7 +51,9 @@
\newtoks\metatexversiontoks \metatexversiontoks\expandafter{\metatexversion} % at the lua end
+%loadcorefile{norm-ctx}
\loadcorefile{syst-pln} % plain tex initializations of internal registers (no further code)
+\loadmarkfile{syst-mes}
\loadmarkfile{luat-cod} %
\loadmarkfile{luat-bas} %
@@ -85,6 +87,10 @@
\loadmarkfile{char-ini}
\loadmarkfile{char-enc} % \registerctxluafile{char-enc}{1.001}
+% attributes
+
+\loadmarkfile{attr-ini}
+
% nodes
\loadmarkfile{node-ini}
diff --git a/tex/context/base/mlib-ctx.lua b/tex/context/base/mlib-ctx.lua
index cc5682e6f..d04d9b370 100644
--- a/tex/context/base/mlib-ctx.lua
+++ b/tex/context/base/mlib-ctx.lua
@@ -11,6 +11,8 @@ if not modules then modules = { } end modules ['mlib-ctx'] = {
local format, join = string.format, table.concat
local sprint = tex.sprint
+local report_mplib = logs.new("mplib")
+
local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
metapost = metapost or {}
@@ -29,7 +31,7 @@ function metapost.getclippath(instance,mpsformat,data,initializations,preamble)
local result = mpx:execute(format("%s;beginfig(1);%s;%s;endfig;",preamble or "",initializations or "",data))
stoptiming(metapost.exectime)
if result.status > 0 then
- logs.report("metafun", "%s: %s", result.status, result.error or result.term or result.log)
+ report_mplib("%s: %s", result.status, result.error or result.term or result.log)
result = nil
else
result = metapost.filterclippath(result)
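
The getclippath change above is one instance of a recurring check: run a chunk through mpx:execute and look at result.status before using the figure. A self-contained sketch of that check with the reporter and the result table mocked; nothing here is real mplib API beyond the status/error/term/log/fig fields the code above reads:

-- illustrative status check on an mplib result table
local function checkresult(report, result)
    if not result then
        report("no result object returned")
    elseif result.status > 0 then
        report("%s: %s", result.status, result.error or result.term or result.log)
    else
        return result                        -- usable, e.g. hand result.fig to the converter
    end
end

local report = function(fmt, ...) print(string.format("mplib > " .. fmt, ...)) end
checkresult(report, { status = 1, term = "! Isolated expression." })
print(checkresult(report, { status = 0, fig = { } }) ~= nil)   -- true
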
diff --git a/tex/context/base/mlib-pdf.lua b/tex/context/base/mlib-pdf.lua
index 352070408..28f9c57ca 100644
--- a/tex/context/base/mlib-pdf.lua
+++ b/tex/context/base/mlib-pdf.lua
@@ -10,6 +10,8 @@ local format, concat, gsub = string.format, table.concat, string.gsub
local texsprint = tex.sprint
local abs, sqrt, round = math.abs, math.sqrt, math.round
+local report_mplib = logs.new("mplib")
+
local copy_node, write_node = node.copy, node.write
local ctxcatcodes = tex.ctxcatcodes
@@ -69,7 +71,7 @@ function metapost.flush_literal(d) -- \def\MPLIBtoPDF#1{\ctxlua{metapost.flush_l
literal.data = savedliterals[d]
write_node(literal)
else
- logs.report("metapost","problem flushing literal %s",d)
+ report_mplib("problem flushing literal %s",d)
end
end
diff --git a/tex/context/base/mlib-pps.lua b/tex/context/base/mlib-pps.lua
index 8b36660d3..f00c99cdb 100644
--- a/tex/context/base/mlib-pps.lua
+++ b/tex/context/base/mlib-pps.lua
@@ -24,6 +24,8 @@ local ctxcatcodes = tex.ctxcatcodes
local trace_textexts = false trackers.register("metapost.textexts", function(v) trace_textexts = v end)
+local report_mplib = logs.new("mplib")
+
colors = colors or { }
local rgbtocmyk = colors.rgbtocmyk or function() return 0,0,0,1 end
@@ -117,11 +119,11 @@ function metapost.specials.register(str) -- only colors
if cc then
cc[n] = data
else
- logs.report("mplib","problematic special: %s (no colordata class %s)", str or "?",class)
+ report_mplib("problematic special: %s (no colordata class %s)", str or "?",class)
end
else
-- there is some bug to be solved, so we issue a message
- logs.report("mplib","problematic special: %s", str or "?")
+ report_mplib("problematic special: %s", str or "?")
end
end
--~ if match(str,"^%%%%MetaPostOption: multipass") then
@@ -248,7 +250,7 @@ function metapost.specials.ps(specification,object,result) -- positions
local label = specification
x = x - metapost.llx
y = metapost.ury - y
- -- logs.report("mplib", "todo: position '%s' at (%s,%s) with (%s,%s)",label,x,y,w,h)
+ -- report_mplib( "todo: position '%s' at (%s,%s) with (%s,%s)",label,x,y,w,h)
sprint(ctxcatcodes,format("\\dosavepositionwhd{%s}{0}{%sbp}{%sbp}{%sbp}{%sbp}{0pt}",label,x,y,w,h))
return { }, nil, nil, nil
end
@@ -472,8 +474,8 @@ function metapost.specials.tf(specification,object)
-- metapost.textext_current = metapost.first_box + n - 1
-- end
if trace_textexts then
- -- logs.report("metapost","first pass: order %s, box %s",n,metapost.textext_current)
- logs.report("metapost","first pass: order %s",n)
+ -- report_mplib("first pass: order %s, box %s",n,metapost.textext_current)
+ report_mplib("first pass: order %s",n)
end
-- sprint(ctxcatcodes,format("\\MPLIBsettext{%s}{%s}",metapost.textext_current,str))
sprint(ctxcatcodes,format("\\MPLIBsettext{%s}{%s}",n,str))
@@ -488,8 +490,7 @@ function metapost.specials.ts(specification,object,result,flusher)
if n and str then
n = tonumber(n)
if trace_textexts then
- -- logs.report("metapost","second pass: order %s, box %s",n,metapost.textext_current)
- logs.report("metapost","second pass: order %s",n)
+ report_mplib("second pass: order %s",n)
end
local op = object.path
local first, second, fourth = op[1], op[2], op[4]
@@ -700,7 +701,7 @@ do
local texmess = (dquote/ditto + (1 - etex))^0
local function ignore(s)
- logs.report("mplib","ignoring verbatim tex: %s",s)
+ report_mplib("ignoring verbatim tex: %s",s)
return ""
end
@@ -755,7 +756,7 @@ function metapost.text_texts_data()
--~ local box = texbox[i]
for n, box in next, textexts do
if trace_textexts then
- logs.report("metapost","passed data: order %s, box %s",n,i)
+ report_mplib("passed data: order %s",n)
end
if box then
t[#t+1] = format(text_data_template,n,box.width/factor,n,box.height/factor,n,box.depth/factor)
diff --git a/tex/context/base/mlib-pps.mkiv b/tex/context/base/mlib-pps.mkiv
index 0a78a8704..a27eb56df 100644
--- a/tex/context/base/mlib-pps.mkiv
+++ b/tex/context/base/mlib-pps.mkiv
@@ -43,9 +43,13 @@
\def\MPLIBsettext#1% #2%
{\dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox}
+% \def\MPLIBgettextscaled#1#2#3% why a copy
+% {\ctxlua{metapost.gettext(\number\MPtextbox,#1)}% \black has no use here (applied to box)
+% \vbox to \zeropoint{\vss\hbox to \zeropoint{\black\scale[sx=#2,sy=#3]{\raise\dp\MPtextbox\box\MPtextbox}\hss}}}
+
\def\MPLIBgettextscaled#1#2#3% why a copy
- {\ctxlua{metapost.gettext(\number\MPtextbox,#1)}%
- \vbox to \zeropoint{\vss\hbox to \zeropoint{\black\scale[sx=#2,sy=#3]{\raise\dp\MPtextbox\box\MPtextbox}\hss}}}
+ {\ctxlua{metapost.gettext(\number\MPtextbox,#1)}% we need the colorhack or else the color backend does not sync
+ \vbox to \zeropoint{\vss\hbox to \zeropoint{\scale[\c!sx=#2,\c!sy=#3]{\raise\dp\MPtextbox\box\MPtextbox}\forcecolorhack\hss}}}
\def\MPLIBgraphictext#1%
{\startTEXpage[\c!scale=10000]#1\stopTEXpage}
diff --git a/tex/context/base/mlib-run.lua b/tex/context/base/mlib-run.lua
index f352e1db1..b961fa02c 100644
--- a/tex/context/base/mlib-run.lua
+++ b/tex/context/base/mlib-run.lua
@@ -31,6 +31,8 @@ nears zero.</p>
local trace_graphics = false trackers.register("metapost.graphics", function(v) trace_graphics = v end)
+local report_mplib = logs.new("mplib")
+
local format, gsub, match = string.format, string.gsub, string.match
local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
@@ -121,20 +123,20 @@ end
function metapost.reporterror(result)
if not result then
- metapost.report("mp error: no result object returned")
+ report_mplib("mp error: no result object returned")
elseif result.status > 0 then
local t, e, l = result.term, result.error, result.log
if t and t ~= "" then
- metapost.report("mp terminal: %s",t)
+ report_mplib("mp terminal: %s",t)
end
if e then
- metapost.report("mp error: %s",(e=="" and "?") or e)
+ report_mplib("mp error: %s",(e=="" and "?") or e)
end
if not t and not e and l then
metapost.lastlog = metapost.lastlog .. "\n" .. l
- metapost.report("mp log: %s",l)
+ report_mplib("mp log: %s",l)
else
- metapost.report("mp error: unknown, no error, terminal or log messages")
+ report_mplib("mp error: unknown, no error, terminal or log messages")
end
else
return false
@@ -142,22 +144,63 @@ function metapost.reporterror(result)
return true
end
-function metapost.checkformat(mpsinput, mpsformat, dirname)
- mpsinput = file.addsuffix(mpsinput or "metafun", "mp")
- mpsformat = file.removesuffix(file.basename(mpsformat or texconfig.formatname or (tex and tex.formatname) or mpsinput))
- local mpsbase = file.removesuffix(file.basename(mpsinput))
+--~ function metapost.checkformat(mpsinput)
+--~ local mpsversion = environment.version or "unset version"
+--~ local mpsinput = file.addsuffix(mpsinput or "metafun", "mp")
+--~ local mpsformat = file.removesuffix(file.basename(mpsformat or texconfig.formatname or (tex and tex.formatname) or mpsinput))
+--~ local mpsbase = file.removesuffix(file.basename(mpsinput))
+--~ if mpsbase ~= mpsformat then
+--~ mpsformat = mpsformat .. "-" .. mpsbase
+--~ end
+--~ mpsformat = file.addsuffix(mpsformat, "mem")
+--~ local pth = file.dirname(texconfig.formatname or "") -- to be made dynamic
+--~ if pth ~= "" then
+--~ mpsformat = file.join(pth,mpsformat)
+--~ end
+--~ if lfs.isfile(mpsformat) then
+--~ commands.writestatus("mplib","loading '%s' from '%s'", mpsinput, mpsformat)
+--~ local mpx, result = metapost.load(mpsformat)
+--~ if mpx then
+--~ local result = mpx:execute("show mp_parent_version ;")
+--~ if not result.log then
+--~ metapost.reporterror(result)
+--~ else
+--~ local version = match(result.log,">> *(.-)[\n\r]") or "unknown"
+--~ version = gsub(version,"[\'\"]","")
+--~ if version ~= mpsversion then
+--~ commands.writestatus("mplib","version mismatch: %s <> %s", version or "unknown", mpsversion)
+--~ else
+--~ return mpx
+--~ end
+--~ end
+--~ else
+--~ commands.writestatus("mplib","error in loading '%s' from '%s'", mpsinput, mpsformat)
+--~ metapost.reporterror(result)
+--~ end
+--~ end
+--~ commands.writestatus("mplib","making '%s' into '%s'", mpsinput, mpsformat)
+--~ metapost.make(mpsinput,mpsformat,mpsversion) -- somehow return ... fails here
+--~ if lfs.isfile(mpsformat) then
+--~ commands.writestatus("mplib","loading '%s' from '%s'", mpsinput, mpsformat)
+--~ return metapost.load(mpsformat)
+--~ else
+--~ commands.writestatus("mplib","problems with '%s' from '%s'", mpsinput, mpsformat)
+--~ end
+--~ end
+
+function metapost.checkformat(mpsinput)
+ local mpsversion = environment.version or "unset version"
+ local mpsinput = file.addsuffix(mpsinput or "metafun", "mp")
+ local mpsformat = file.removesuffix(file.basename(texconfig.formatname or (tex and tex.formatname) or mpsinput))
+ local mpsbase = file.removesuffix(file.basename(mpsinput))
if mpsbase ~= mpsformat then
mpsformat = mpsformat .. "-" .. mpsbase
end
mpsformat = file.addsuffix(mpsformat, "mem")
- local pth = dirname or file.dirname(texconfig.formatname or "")
- if pth ~= "" then
- mpsformat = file.join(pth,mpsformat)
- end
- local the_version = environment.version or "unset version"
- if lfs.isfile(mpsformat) then
- commands.writestatus("mplib","loading '%s' from '%s'", mpsinput, mpsformat)
- local mpx, result = metapost.load(mpsformat)
+ local mpsformatfullname = caches.getfirstreadablefile(mpsformat,"formats") or ""
+ if mpsformatfullname ~= "" then
+ commands.writestatus("mplib","loading '%s' from '%s'", mpsinput, mpsformatfullname)
+ local mpx, result = metapost.load(mpsformatfullname)
if mpx then
local result = mpx:execute("show mp_parent_version ;")
if not result.log then
@@ -165,24 +208,25 @@ function metapost.checkformat(mpsinput, mpsformat, dirname)
else
local version = match(result.log,">> *(.-)[\n\r]") or "unknown"
version = gsub(version,"[\'\"]","")
- if version ~= the_version then
- commands.writestatus("mplib","version mismatch: %s <> %s", version or "unknown", the_version)
+ if version ~= mpsversion then
+ commands.writestatus("mplib","version mismatch: %s <> %s", version or "unknown", mpsversion)
else
return mpx
end
end
else
- commands.writestatus("mplib","error in loading '%s' from '%s'", mpsinput, mpsformat)
+ commands.writestatus("mplib","error in loading '%s' from '%s'", mpsinput, mpsformatfullname)
metapost.reporterror(result)
end
end
- commands.writestatus("mplib","making '%s' into '%s'", mpsinput, mpsformat)
- metapost.make(mpsinput,mpsformat,the_version) -- somehow return ... fails here
- if lfs.isfile(mpsformat) then
- commands.writestatus("mplib","loading '%s' from '%s'", mpsinput, mpsformat)
- return metapost.load(mpsformat)
+ local mpsformatfullname = caches.setfirstwritablefile(mpsformat,"formats")
+ commands.writestatus("mplib","making '%s' into '%s'", mpsinput, mpsformatfullname)
+ metapost.make(mpsinput,mpsformatfullname,mpsversion) -- somehow return ... fails here
+ if lfs.isfile(mpsformatfullname) then
+ commands.writestatus("mplib","loading '%s' from '%s'", mpsinput, mpsformatfullname)
+ return metapost.load(mpsformatfullname)
else
- commands.writestatus("mplib","problems with '%s' from '%s'", mpsinput, mpsformat)
+ commands.writestatus("mplib","problems with '%s' from '%s'", mpsinput, mpsformatfullname)
end
end
@@ -258,7 +302,7 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
local str = (result.term ~= "" and result.term) or "no terminal output"
if not str:is_empty() then
metapost.lastlog = metapost.lastlog .. "\n" .. str
- metapost.report("mp log: %s",str)
+ report_mplib("mp log: %s",str)
end
end
if result.fig then
@@ -266,7 +310,7 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
end
end
else
- metapost.report("mp error: invalid graphic component %s",i)
+ report_mplib("mp error: invalid graphic component %s",i)
end
end
else
@@ -284,13 +328,13 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
end
-- todo: error message
if not result then
- metapost.report("mp error: no result object returned")
+ report_mplib("mp error: no result object returned")
elseif result.status > 0 then
- metapost.report("mp error: %s",(result.term or "no-term") .. "\n" .. (result.error or "no-error"))
+ report_mplib("mp error: %s",(result.term or "no-term") .. "\n" .. (result.error or "no-error"))
else
if metapost.showlog then
metapost.lastlog = metapost.lastlog .. "\n" .. result.term
- metapost.report("mp info: %s",result.term or "no-term")
+ report_mplib("mp info: %s",result.term or "no-term")
end
if result.fig then
converted = metapost.convert(result, trialrun, flusher, multipass, askedfig)
@@ -308,11 +352,7 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
end
function metapost.convert()
- metapost.report('mp warning: no converter set')
-end
-
-function metapost.report(...)
- logs.report("mplib",...)
+ report_mplib('mp warning: no converter set')
end
-- handy
@@ -326,7 +366,7 @@ function metapost.directrun(formatname,filename,outputformat,astable,mpdata)
if not data then
logs.simple("unknown file '%s'",filename or "?")
else
- local mpx = metapost.checkformat(formatname,formatname,caches.setpath("formats"))
+ local mpx = metapost.checkformat(formatname)
if not mpx then
logs.simple("unknown format '%s'",formatname or "?")
else
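
In the rewritten checkformat the mem file is no longer looked up next to texconfig.formatname; it is resolved through the cache layer, reading with caches.getfirstreadablefile and writing with caches.setfirstwritablefile. A condensed sketch of that control flow with the loader and maker passed in as stubs; apart from those two cache calls, every name below is illustrative:

-- illustrative control flow of the cache-based format lookup
local function checkformat(caches, load, make, mpsinput)
    local mpsversion = "2010.06.23"                          -- stands in for environment.version
    local mpsformat  = (mpsinput or "metafun") .. ".mem"
    local readable   = caches.getfirstreadablefile(mpsformat, "formats") or ""
    if readable ~= "" then
        local mpx = load(readable)                           -- metapost.load in the real code
        if mpx and mpx.version == mpsversion then
            return mpx                                       -- cached mem matches, reuse it
        end
    end
    local writable = caches.setfirstwritablefile(mpsformat, "formats")
    make(mpsinput, writable, mpsversion)                     -- metapost.make in the real code
    return load(writable)                                    -- reload the freshly made mem
end

-- minimal mocks so the sketch runs standalone
local fakecaches = {
    getfirstreadablefile = function() return "" end,
    setfirstwritablefile = function(name) return "/tmp/luatex-cache/" .. name end,
}
local mpx = checkformat(fakecaches,
    function(name) return { version = "2010.06.23", name = name } end,
    function(input, target, version) end,
    "metafun")
print(mpx.name)                                              -- /tmp/luatex-cache/metafun.mem
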
diff --git a/tex/context/base/mtx-context-arrange.tex b/tex/context/base/mtx-context-arrange.tex
index 73431567d..7b764c495 100644
--- a/tex/context/base/mtx-context-arrange.tex
+++ b/tex/context/base/mtx-context-arrange.tex
@@ -1,4 +1,4 @@
-% engine=luatex
+engine=luatex
%D \module
%D [ file=mtx-context-arrange,
diff --git a/tex/context/base/mult-cld.lua b/tex/context/base/mult-cld.lua
index 81038b68b..a9fb1ff1c 100644
--- a/tex/context/base/mult-cld.lua
+++ b/tex/context/base/mult-cld.lua
@@ -51,7 +51,7 @@ end
function context.trace(intercept)
local normalflush = flush
flush = function(c,...)
- logs.report("context",concat({...}))
+ trace_context(concat({...}))
if not intercept then
normalflush(c,...)
end
@@ -62,6 +62,8 @@ end
trackers.register("context.flush", function(v) if v then context.trace() end end)
trackers.register("context.intercept", function(v) if v then context.trace(true) end end)
+local trace_context = logs.new("context")
+
local function writer(k,...)
if k then
flush(ctxcatcodes,k)
@@ -109,9 +111,9 @@ local function writer(k,...)
flush(ctxcatcodes,tostring(ti))
-- end
elseif typ == "thread" then
- logs.report("interfaces","coroutines not supported as we cannot yeild across boundaries")
+ trace_context("coroutines not supported as we cannot yeild across boundaries")
else
- logs.report("interfaces","error: %s gets a weird argument %s",k,tostring(ti))
+ trace_context("error: %s gets a weird argument %s",k,tostring(ti))
end
end
end
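
The tracing hook above now writes through a dedicated reporter instead of logs.report. One detail worth illustrating: in plain Lua a local declared after a function body is not visible inside that body, so a sketch of the same hook declares the reporter before wrapping the flusher. The fallback logs.new and the dummy flush are assumptions that make the snippet runnable outside ConTeXt:

-- sketch of intercepting and tracing flushed context commands
logs = logs or { new = function(cat) return function(fmt, ...) print(cat, string.format(fmt, ...)) end end }

local trace_context = logs.new("context")

local flush = function(catcodes, ...) end    -- stand-in for the real tex.sprint based flusher

local function trace(intercept)
    local normalflush = flush
    flush = function(c, ...)
        trace_context(table.concat({...}))
        if not intercept then
            normalflush(c, ...)
        end
    end
end

trace(true)
flush(0, "\\starttext ", "Hello", "\\stoptext")   -- traced, not passed on
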
diff --git a/tex/context/base/mult-ini.mkiv b/tex/context/base/mult-ini.mkiv
index e20548f9b..493a04cea 100644
--- a/tex/context/base/mult-ini.mkiv
+++ b/tex/context/base/mult-ini.mkiv
@@ -437,56 +437,74 @@
\def\doignorevalue#1#2#3%
{\dosetvalue{#1}{#2}{}}
-\def\dosetvalue#1#2%
- {\let\c!internal!\c!internal!n
- \ifcsname\k!prefix!#2\endcsname
- \let\c!internal!\c!internal!y
- \@EA\def\csname#1\csname\k!prefix!#2\endcsname%\endcsname
- \else
- \let\c!internal!\c!internal!y
- \@EA\def\csname#1#2%\endcsname
- \fi\endcsname}
-
-\def\dosetevalue#1#2%
- {\let\c!internal!\c!internal!n
- \ifcsname\k!prefix!#2\endcsname
- \let\c!internal!\c!internal!y
- \@EA\edef\csname#1\csname\k!prefix!#2\endcsname%\endcsname
- \else
- \let\c!internal!\c!internal!y
- \@EA\edef\csname#1#2%\endcsname
- \fi\endcsname}
-
-\def\dosetgvalue#1#2%
- {\let\c!internal!\c!internal!n
- \ifcsname\k!prefix!#2\endcsname
- \let\c!internal!\c!internal!y
- \@EA\gdef\csname#1\csname\k!prefix!#2\endcsname%\endcsname
- \else
- \let\c!internal!\c!internal!y
- \@EA\gdef\csname#1#2%\endcsname
- \fi\endcsname}
-
-\def\dosetxvalue#1#2%
- {\let\c!internal!\c!internal!n
- \ifcsname\k!prefix!#2\endcsname
- \let\c!internal!\c!internal!y
- \@EA\xdef\csname#1\csname\k!prefix!#2\endcsname%\endcsname
- \else
- \let\c!internal!\c!internal!y
- \@EA\xdef\csname#1#2%\endcsname
- \fi\endcsname}
-
-\def\docopyvalue#1#2#3% real tricky expansion, quite unreadable
- {\let\c!internal!\c!internal!n
- \ifcsname\k!prefix!#3\endcsname
- \let\c!internal!\c!internal!y
- \@EA\def\csname#1\csname\k!prefix!#3\endcsname
- \@EA\endcsname\@EA{\csname#2\csname\k!prefix!#3\endcsname\endcsname}%
- \else
- \let\c!internal!\c!internal!y
- \@EA\def\csname#1#3\@EA\endcsname\@EA{\csname#2#3\endcsname}%
- \fi}
+% \def\dosetvalue#1#2%
+% {\let\c!internal!\c!internal!n
+% \ifcsname\k!prefix!#2\endcsname
+% \let\c!internal!\c!internal!y
+% \@EA\def\csname#1\csname\k!prefix!#2\endcsname%\endcsname
+% \else
+% \let\c!internal!\c!internal!y
+% \@EA\def\csname#1#2%\endcsname
+% \fi\endcsname}
+
+% \def\dosetevalue#1#2%
+% {\let\c!internal!\c!internal!n
+% \ifcsname\k!prefix!#2\endcsname
+% \let\c!internal!\c!internal!y
+% \@EA\edef\csname#1\csname\k!prefix!#2\endcsname%\endcsname
+% \else
+% \let\c!internal!\c!internal!y
+% \@EA\edef\csname#1#2%\endcsname
+% \fi\endcsname}
+
+% \def\dosetgvalue#1#2%
+% {\let\c!internal!\c!internal!n
+% \ifcsname\k!prefix!#2\endcsname
+% \let\c!internal!\c!internal!y
+% \@EA\gdef\csname#1\csname\k!prefix!#2\endcsname%\endcsname
+% \else
+% \let\c!internal!\c!internal!y
+% \@EA\gdef\csname#1#2%\endcsname
+% \fi\endcsname}
+
+% \def\dosetxvalue#1#2%
+% {\let\c!internal!\c!internal!n
+% \ifcsname\k!prefix!#2\endcsname
+% \let\c!internal!\c!internal!y
+% \@EA\xdef\csname#1\csname\k!prefix!#2\endcsname%\endcsname
+% \else
+% \let\c!internal!\c!internal!y
+% \@EA\xdef\csname#1#2%\endcsname
+% \fi\endcsname}
+
+% \def\docopyvalue#1#2#3% real tricky expansion, quite unreadable
+% {\let\c!internal!\c!internal!n
+% \ifcsname\k!prefix!#3\endcsname
+% \let\c!internal!\c!internal!y
+% \@EA\def\csname#1\csname\k!prefix!#3\endcsname
+% \@EA\endcsname\@EA{\csname#2\csname\k!prefix!#3\endcsname\endcsname}%
+% \else
+% \let\c!internal!\c!internal!y
+% \@EA\def\csname#1#3\@EA\endcsname\@EA{\csname#2#3\endcsname}%
+% \fi}
+
+% \def\dosetvalue #1#2{\@EA \def\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+% \def\dosetevalue#1#2{\@EA\edef\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+% \def\dosetgvalue#1#2{\@EA\gdef\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+% \def\dosetxvalue#1#2{\@EA\xdef\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+
+% \def\docopyvalue#1#2#3% real tricky expansion, quite unreadable
+% {\ifcsname\k!prefix!#3\endcsname
+% \@EA\def\csname#1\csname\k!prefix!#3\endcsname\@EA\endcsname\@EA{\csname#2\csname\k!prefix!#3\endcsname\endcsname}%
+% \else
+% \@EA\def\csname#1#3\@EA\endcsname\@EA{\csname#2#3\endcsname}%
+% \fi}
+
+\def\dosetvalue #1#2{\@EA \def\csname#1#2\endcsname}
+\def\dosetevalue #1#2{\@EA\edef\csname#1#2\endcsname}
+\def\dosetgvalue #1#2{\@EA\gdef\csname#1#2\endcsname}
+\def\dosetxvalue #1#2{\@EA\xdef\csname#1#2\endcsname}
+\def\docopyvalue#1#2#3{\@EA \def\csname#1#3\@EA\endcsname\@EA{\csname#2#3\endcsname}}
%D We can now redefine some messages that will be
%D introduced in the multi||lingual system module.
@@ -744,22 +762,26 @@
%D user constants (keywords) become system variables
%D \stopnarrower
-%D Anno 2003 I've forgotten why the \type {\c!internal} is
-%D still in there; it's probably a left over from the time that
+%D The \type {\c!internal} is a left over from the time that
%D the user interface documents were not using a specification
%D alongside a keyword specification but used a shared file in
%D which case we need to go in both directions.
-\let\c!internal!y \string
-\def\c!internal!n {-}
-\let\c!internal! \c!internal!y
-
% temporary mkiv hack (we can best just store the whole table in memory)
+% \let\c!internal!y \string
+% \def\c!internal!n {-}
+% \let\c!internal! \c!internal!y
+
+% \def\setinterfaceconstant#1#2%
+% {\ctxlua{interfaces.setconstant("#1","#2")}%
+% \setvalue{\c!prefix!#1}{\c!internal!#1}%
+% \setvalue{\k!prefix!#2}{#1}}
+
\def\setinterfaceconstant#1#2%
{\ctxlua{interfaces.setconstant("#1","#2")}%
- \setvalue{\c!prefix!#1}{\c!internal!#1}%
- \setvalue{\k!prefix!#2}{#1}}
+ %\setvalue{\k!prefix!#2}{#1}%
+ \setvalue{\c!prefix!#1}{#1}}
\def\setinterfacevariable#1#2%
{\ctxlua{interfaces.setvariable("#1","#2")}%
diff --git a/tex/context/base/node-dum.lua b/tex/context/base/node-dum.lua
index f39a0873f..9483e51fc 100644
--- a/tex/context/base/node-dum.lua
+++ b/tex/context/base/node-dum.lua
@@ -6,7 +6,20 @@ if not modules then modules = { } end modules ['node-dum'] = {
license = "see context related readme files"
}
-nodes = nodes or { }
+nodes = nodes or { }
+fonts = fonts or { }
+attributes = attributes or { }
+
+local traverse_id = node.traverse_id
+local free_node = node.free
+local remove_node = node.remove
+local new_node = node.new
+
+local glyph = node.id('glyph')
+
+-- fonts
+
+local fontdata = fonts.ids or { }
function nodes.simple_font_handler(head)
-- lang.hyphenate(head)
@@ -17,3 +30,98 @@ function nodes.simple_font_handler(head)
head = node.kerning(head)
return head
end
+
+if tex.attribute[0] ~= 0 then
+
+ texio.write_nl("log","!")
+ texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
+ texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
+ texio.write_nl("log","! purposed so setting them at the TeX end might break the font handler.")
+ texio.write_nl("log","!")
+
+ tex.attribute[0] = 0 -- else no features
+
+end
+
+nodes.protect_glyphs = node.protect_glyphs
+nodes.unprotect_glyphs = node.unprotect_glyphs
+
+function nodes.process_characters(head)
+ local usedfonts, done, prevfont = { }, false, nil
+ for n in traverse_id(glyph,head) do
+ local font = n.font
+ if font ~= prevfont then
+ prevfont = font
+ local used = usedfonts[font]
+ if not used then
+ local tfmdata = fontdata[font]
+ if tfmdata then
+ local shared = tfmdata.shared -- we need to check shared, only when same features
+ if shared then
+ local processors = shared.processes
+ if processors and #processors > 0 then
+ usedfonts[font] = processors
+ done = true
+ end
+ end
+ end
+ end
+ end
+ end
+ if done then
+ for font, processors in next, usedfonts do
+ for i=1,#processors do
+ local h, d = processors[i](head,font,0)
+ head, done = h or head, done or d
+ end
+ end
+ end
+ return head, true
+end
+
+-- helper
+
+function nodes.kern(k)
+ local n = new_node("kern",1)
+ n.kern = k
+ return n
+end
+
+function nodes.remove(head, current, free_too)
+ local t = current
+ head, current = remove_node(head,current)
+ if t then
+ if free_too then
+ free_node(t)
+ t = nil
+ else
+ t.next, t.prev = nil, nil
+ end
+ end
+ return head, current, t
+end
+
+function nodes.delete(head,current)
+ return nodes.remove(head,current,true)
+end
+
+nodes.before = node.insert_before
+nodes.after = node.insert_after
+
+-- attributes
+
+attributes.unsetvalue = -0x7FFFFFFF
+
+local numbers, last = { }, 127
+
+function attributes.private(name)
+ local number = numbers[name]
+ if not number then
+ if last < 255 then
+ last = last + 1
+ end
+ number = last
+ numbers[name] = number
+ end
+ return number
+end
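
The generic layer above now carries its own attribute allocator: names map onto numbers above 127, monotonically, capped at 255, with no storage involved. A short usage sketch of exactly that allocator (outputs shown as comments):

-- exercising the generic attributes.private added to node-dum.lua
attributes = attributes or { }
attributes.unsetvalue = -0x7FFFFFFF

local numbers, last = { }, 127

function attributes.private(name)
    local number = numbers[name]
    if not number then
        if last < 255 then
            last = last + 1
        end
        number = last
        numbers[name] = number
    end
    return number
end

print(attributes.private("state"))      -- 128
print(attributes.private("mathsize"))   -- 129
print(attributes.private("state"))      -- 128 again: allocation is idempotent
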
diff --git a/tex/context/base/node-fin.lua b/tex/context/base/node-fin.lua
index c6e3be448..aabcb0db0 100644
--- a/tex/context/base/node-fin.lua
+++ b/tex/context/base/node-fin.lua
@@ -170,7 +170,7 @@ end
local insert_node_before = node.insert_before
local insert_node_after = node.insert_after
-local nsdata, nsdone, nsforced, nsselector, nstrigger
+local nsdata, nsnone, nslistwise, nsforced, nsselector, nstrigger
local current, current_selector, done = 0, 0, false -- nb, stack has a local current !
function states.initialize(namespace,attribute,head)
diff --git a/tex/context/base/node-fnt.lua b/tex/context/base/node-fnt.lua
index b0d073425..594cfc1a1 100644
--- a/tex/context/base/node-fnt.lua
+++ b/tex/context/base/node-fnt.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['node-fnt'] = {
license = "see context related readme files"
}
+if not context then os.exit() end -- generic function in node-dum
+
local next, type = next, type
local trace_characters = false trackers.register("nodes.characters", function(v) trace_characters = v end)
@@ -33,20 +35,6 @@ local fontdata = fonts.ids
-- happen often; we could consider processing sublists but that might need more
-- checking later on; the current approach also permits variants
-if tex.attribute[0] < 0 then
-
- texio.write_nl("log","!")
- texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
- texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
- texio.write_nl("log","! purposed so setting them at the TeX end might break the font handler.")
- texio.write_nl("log","!")
-
- tex.attribute[0] = 0 -- else no features
-
-end
-
--- this will be redone and split in a generic one and a context one
-
function nodes.process_characters(head)
-- either next or not, but definitely no already processed list
starttiming(nodes)
@@ -100,16 +88,17 @@ function nodes.process_characters(head)
prevattr = attr
end
end
+
-- we could combine these and just make the attribute nil
if u == 1 then
local font, processors = next(usedfonts)
local n = #processors
if n > 0 then
- local h, d = processors[1](head,font,false)
+ local h, d = processors[1](head,font,0)
head, done = h or head, done or d
if n > 1 then
for i=2,n do
- local h, d = processors[i](head,font,false)
+ local h, d = processors[i](head,font,0)
head, done = h or head, done or d
end
end
@@ -117,11 +106,11 @@ function nodes.process_characters(head)
elseif u > 0 then
for font, processors in next, usedfonts do
local n = #processors
- local h, d = processors[1](head,font,false)
+ local h, d = processors[1](head,font,0)
head, done = h or head, done or d
if n > 1 then
for i=2,n do
- local h, d = processors[i](head,font,false)
+ local h, d = processors[i](head,font,0)
head, done = h or head, done or d
end
end
@@ -162,46 +151,5 @@ function nodes.process_characters(head)
return head, true
end
-if node.protect_glyphs then
-
- nodes.protect_glyphs = node.protect_glyphs
- nodes.unprotect_glyphs = node.unprotect_glyphs
-
-else do
-
- -- initial value subtype : X000 0001 = 1 = 0x01 = char
- --
- -- expected before linebreak : X000 0000 = 0 = 0x00 = glyph
- -- X000 0010 = 2 = 0x02 = ligature
- -- X000 0100 = 4 = 0x04 = ghost
- -- X000 1010 = 10 = 0x0A = leftboundary lig
- -- X001 0010 = 18 = 0x12 = rightboundary lig
- -- X001 1010 = 26 = 0x1A = both boundaries lig
- -- X000 1100 = 12 = 0x1C = leftghost
- -- X001 0100 = 20 = 0x14 = rightghost
-
- function nodes.protect_glyphs(head)
- local done = false
- for g in traverse_id(glyph,head) do
- local s = g.subtype
- if s == 1 then
- done, g.subtype = true, 256
- elseif s <= 256 then
- done, g.subtype = true, 256 + s
- end
- end
- return done
- end
-
- function nodes.unprotect_glyphs(head)
- local done = false
- for g in traverse_id(glyph,head) do
- local s = g.subtype
- if s > 256 then
- done, g.subtype = true, s - 256
- end
- end
- return done
- end
-
-end end
+nodes.protect_glyphs = node.protect_glyphs
+nodes.unprotect_glyphs = node.unprotect_glyphs
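
After this change font processors are called as processor(head, font, 0): the third argument is an attribute (dynamics) value rather than the old boolean false, with 0 as the default. A hedged sketch of a processor that fits this signature; the handler body and the fake node are purely illustrative:

-- minimal processor matching the (head, font, attr) convention used above
local function dummyprocessor(head, font, attr)
    -- a real handler (ligaturing, kerning, feature application) walks the glyph list here
    local done = (attr == 0)                 -- 0 replaces the former 'false' third argument
    return head, done
end

local head = { id = "glyph", font = 1, char = 0x66 }   -- fake node, illustrative only
local h, done = dummyprocessor(head, 1, 0)
print(done)                                  -- true
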
diff --git a/tex/context/base/node-ini.lua b/tex/context/base/node-ini.lua
index 36e240238..8f507e9b1 100644
--- a/tex/context/base/node-ini.lua
+++ b/tex/context/base/node-ini.lua
@@ -17,54 +17,6 @@ local utf = unicode.utf8
local next, type = next, type
local format, concat, match, utfchar = string.format, table.concat, string.match, utf.char
-local chardata = characters and characters.data
-
---[[ldx--
-<p>We start with a registration system for atributes so that we can use the
-symbolic names later on.</p>
---ldx]]--
-
-attributes = attributes or { }
-
-attributes.names = attributes.names or { }
-attributes.numbers = attributes.numbers or { }
-attributes.list = attributes.list or { }
-attributes.unsetvalue = -0x7FFFFFFF
-
-storage.register("attributes/names", attributes.names, "attributes.names")
-storage.register("attributes/numbers", attributes.numbers, "attributes.numbers")
-storage.register("attributes/list", attributes.list, "attributes.list")
-
-local names, numbers, list = attributes.names, attributes.numbers, attributes.list
-
-function attributes.define(name,number) -- at the tex end
- if not numbers[name] then
- numbers[name], names[number], list[number] = number, name, { }
- end
-end
-
---[[ldx--
-<p>We can use the attributes in the range 127-255 (outside user space). These
-are only used when no attribute is set at the \TEX\ end which normally
-happens in <l n='context'/>.</p>
---ldx]]--
-
-storage.shared.attributes_last_private = storage.shared.attributes_last_private or 127
-
-function attributes.private(name) -- at the lua end (hidden from user)
- local number = numbers[name]
- if not number then
- local last = storage.shared.attributes_last_private or 127
- if last < 255 then
- last = last + 1
- storage.shared.attributes_last_private = last
- end
- number = last
- numbers[name], names[number], list[number] = number, name, { }
- end
- return number
-end
-
--[[ldx--
<p>Access to nodes is what gives <l n='luatex'/> its power. Here we
implement a few helper functions. These functions are rather optimized.</p>
@@ -224,21 +176,6 @@ end
nodes.count = count
--- new, will move
-
-function attributes.ofnode(n)
- local a = n.attr
- if a then
- local names = attributes.names
- a = a.next
- while a do
- local number, value = a.number, a.value
- texio.write_nl(format("%s : attribute %3i, value %4i, name %s",tostring(n),number,value,names[number] or '?'))
- a = a.next
- end
- end
-end
-
local left, space = lpeg.P("<"), lpeg.P(" ")
nodes.filterkey = left * (1-left)^0 * left * space^0 * lpeg.C((1-space)^0)
diff --git a/tex/context/base/node-ini.mkiv b/tex/context/base/node-ini.mkiv
index 787259316..23cf7b842 100644
--- a/tex/context/base/node-ini.mkiv
+++ b/tex/context/base/node-ini.mkiv
@@ -33,41 +33,6 @@
\registerctxluafile{node-inj}{1.001} % we might split it off
\registerctxluafile{node-typ}{1.001} % experimental
-\newtoks \attributesresetlist
-
-\ifdefined \v!global \else \def\v!global{global} \fi % for metatex
-
-\unexpanded\def\defineattribute
- {\dodoubleempty\dodefineattribute}
-
-\def\dodefineattribute[#1][#2]% alternatively we can let lua do the housekeeping
- {\expandafter\newattribute\csname @attr@#1\endcsname
- \expandafter \xdef\csname :attr:#1\endcsname{\number\lastallocatedattribute}%
- \ctxlua{attributes.define("#1",\number\lastallocatedattribute)}%
- %\writestatus\m!systems{defining attribute #1 with number \number\lastallocatedattribute}%
- \doifnotinset\v!global{#2}{\appendetoks\csname @attr@#1\endcsname\attributeunsetvalue\to\attributesresetlist}}
-
-\unexpanded\def\definesystemattribute
- {\dodoubleempty\dodefinesystemattribute}
-
-\def\dodefinesystemattribute[#1][#2]% alternatively we can let lua do the housekeeping
- {\scratchcounter\ctxlua{tex.print(attributes.private("#1"))}\relax
- \global\expandafter\attributedef\csname @attr@#1\endcsname\scratchcounter
- \expandafter \xdef\csname :attr:#1\endcsname{\number\scratchcounter}%
- %\writestatus\m!systems{defining system attribute #1 with number \number\scratchcounter}%
- \doifnotinset\v!global{#2}{\appendetoks\csname @attr@#1\endcsname\attributeunsetvalue\to\attributesresetlist}}
-
-% expandable so we can \edef them for speed
-
-\def\dosetattribute#1#2{\csname @attr@#1\endcsname#2\relax}
-\def\doresetattribute#1{\csname @attr@#1\endcsname\attributeunsetvalue}
-\def\dogetattribute #1{\number\csname @attr@#1\endcsname}
-\def\dogetattributeid#1{\csname :attr:#1\endcsname}
-
-\let\dompattribute\gobbletwoarguments
-
-\def\resetallattributes{\the\attributesresetlist}
-
\newcount\shownodescounter
\def\shownextnodes {\afterassignment\doshownodes\shownextnodescounter}
diff --git a/tex/context/base/node-inj.lua b/tex/context/base/node-inj.lua
index 9c4612a22..22a716a12 100644
--- a/tex/context/base/node-inj.lua
+++ b/tex/context/base/node-inj.lua
@@ -17,6 +17,8 @@ local next = next
local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
+local report_injections = logs.new("injections")
+
fonts = fonts or { }
fonts.tfm = fonts.tfm or { }
fonts.ids = fonts.ids or { }
@@ -27,6 +29,7 @@ local glyph = node.id('glyph')
local kern = node.id('kern')
local traverse_id = node.traverse_id
+local unset_attribute = node.unset_attribute
local has_attribute = node.has_attribute
local set_attribute = node.set_attribute
local insert_node_before = node.insert_before
@@ -88,8 +91,10 @@ function nodes.set_kern(current,factor,rlmode,x,tfmchr)
local bound = #kerns + 1
set_attribute(current,kernpair,bound)
kerns[bound] = { rlmode, dx }
+ return dx, bound
+ else
+ return 0, 0
end
- return dx, bound
end
function nodes.set_mark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, ma=markanchor
@@ -104,7 +109,7 @@ function nodes.set_mark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, m
set_attribute(start,markdone,index)
return dx, dy, bound
else
- logs.report("nodes mark", "possible problem, U+%04X is base without data (id: %s)",base.char,bound)
+ report_injections("possible problem, U+%04X is base mark without data (id: %s)",base.char,bound)
end
end
index = index or 1
@@ -120,10 +125,7 @@ function nodes.trace_injection(head)
local function dir(n)
return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or ("unset")
end
- local function report(...)
- logs.report("nodes finisher",...)
- end
- report("begin run")
+ report_injections("begin run")
for n in traverse_id(glyph,head) do
if n.subtype < 256 then
local kp = has_attribute(n,kernpair)
@@ -132,45 +134,46 @@ function nodes.trace_injection(head)
local md = has_attribute(n,markdone)
local cb = has_attribute(n,cursbase)
local cc = has_attribute(n,curscurs)
- report("char U+%05X, font=%s",n.char,n.font)
+ report_injections("char U+%05X, font=%s",n.char,n.font)
if kp then
local k = kerns[kp]
if k[3] then
- report(" pairkern: dir=%s, x=%s, y=%s, w=%s, h=%s",dir(k[1]),k[2] or "?",k[3] or "?",k[4] or "?",k[5] or "?")
+ report_injections(" pairkern: dir=%s, x=%s, y=%s, w=%s, h=%s",dir(k[1]),k[2] or "?",k[3] or "?",k[4] or "?",k[5] or "?")
else
- report(" kern: dir=%s, dx=%s",dir(k[1]),k[2] or "?")
+ report_injections(" kern: dir=%s, dx=%s",dir(k[1]),k[2] or "?")
end
end
if mb then
- report(" markbase: bound=%s",mb)
+ report_injections(" markbase: bound=%s",mb)
end
if mm then
local m = marks[mm]
if mb then
local m = m[mb]
if m then
- report(" markmark: bound=%s, index=%s, dx=%s, dy=%s",mm,md or "?",m[1] or "?",m[2] or "?")
+ report_injections(" markmark: bound=%s, index=%s, dx=%s, dy=%s",mm,md or "?",m[1] or "?",m[2] or "?")
else
- report(" markmark: bound=%s, missing index",mm)
+ report_injections(" markmark: bound=%s, missing index",mm)
end
else
m = m[1]
- report(" markmark: bound=%s, dx=%s, dy=%s",mm,m[1] or "?",m[2] or "?")
+ report_injections(" markmark: bound=%s, dx=%s, dy=%s",mm,m[1] or "?",m[2] or "?")
end
end
if cb then
- report(" cursbase: bound=%s",cb)
+ report_injections(" cursbase: bound=%s",cb)
end
if cc then
local c = cursives[cc]
- report(" curscurs: bound=%s, dir=%s, dx=%s, dy=%s",cc,dir(c[1]),c[2] or "?",c[3] or "?")
+ report_injections(" curscurs: bound=%s, dir=%s, dx=%s, dy=%s",cc,dir(c[1]),c[2] or "?",c[3] or "?")
end
end
end
- report("end run")
+ report_injections("end run")
end
-- todo: reuse tables (i.e. no collection), but will be extra fields anyway
+-- todo: check for attribute
function nodes.inject_kerns(head,where,keep)
local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
@@ -193,6 +196,7 @@ function nodes.inject_kerns(head,where,keep)
mk[n] = tm[n.char]
local k = has_attribute(n,kernpair)
if k then
+--~ unset_attribute(k,kernpair)
local kk = kerns[k]
if kk then
local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
@@ -342,39 +346,21 @@ function nodes.inject_kerns(head,where,keep)
-- only w can be nil, can be sped up when w == nil
local rl, x, w, r2l = k[1], k[2] or 0, k[4] or 0, k[6]
local wx = w - x
---~ if rl < 0 then
---~ if r2l then
---~ if wx ~= 0 then
---~ insert_node_before(head,n,newkern(wx))
---~ end
---~ if x ~= 0 then
---~ insert_node_after (head,n,newkern(x))
---~ end
---~ else
---~ if x ~= 0 then
---~ insert_node_before(head,n,newkern(x))
---~ end
---~ if wx ~= 0 then
---~ insert_node_after(head,n,newkern(wx))
---~ end
---~ end
---~ else
- if r2l then
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
+ if r2l then
+ if wx ~= 0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x ~= 0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx ~= 0 then
+ insert_node_after(head,n,newkern(wx))
end
---~ end
+ end
end
end
if next(cx) then
@@ -401,35 +387,19 @@ function nodes.inject_kerns(head,where,keep)
nodes.trace_injection(head)
end
for n in traverse_id(glyph,head) do
- local k = has_attribute(n,kernpair)
- if k then
- local kk = kerns[k]
- if kk then
- local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
- if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
- end
- if w then
- -- copied from above
- local r2l = kk[6]
- local wx = w - x
---~ if rl < 0 then
---~ if r2l then
---~ if x ~= 0 then
---~ insert_node_before(head,n,newkern(x))
---~ end
---~ if wx ~= 0 then
---~ insert_node_after(head,n,newkern(wx))
---~ end
---~ else
---~ if wx ~= 0 then
---~ insert_node_before(head,n,newkern(wx))
---~ end
---~ if x ~= 0 then
---~ insert_node_after (head,n,newkern(x))
---~ end
---~ end
---~ else
+ if n.subtype < 256 then
+ local k = has_attribute(n,kernpair)
+ if k then
+ local kk = kerns[k]
+ if kk then
+ local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
+ if y and y ~= 0 then
+ n.yoffset = y -- todo: h ?
+ end
+ if w then
+ -- copied from above
+ local r2l = kk[6]
+ local wx = w - x
if r2l then
if wx ~= 0 then
insert_node_before(head,n,newkern(wx))
@@ -445,11 +415,11 @@ function nodes.inject_kerns(head,where,keep)
insert_node_after(head,n,newkern(wx))
end
end
---~ end
- else
- -- simple (e.g. kernclass kerns)
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
+ else
+ -- simple (e.g. kernclass kerns)
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
end
end
end
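
The pattern repeated throughout this commit is to replace ad hoc logs.report(category,...) calls with a reporter created once per category via logs.new. A minimal sketch of that idiom, assuming a ConTeXt MkIV run where logs and trackers are loaded; the category name is the one used in node-inj.lua above.

    local report_injections = logs.new("injections")   -- one reporter per category
    local trace_injections  = false

    trackers.register("nodes.injections", function(v)
        trace_injections = v                            -- \enabletrackers[nodes.injections]
    end)

    if trace_injections then
        report_injections("begin run")                  -- was: logs.report("nodes finisher",...)
    end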
diff --git a/tex/context/base/node-mig.lua b/tex/context/base/node-mig.lua
index f9f0ad231..c014c8de4 100644
--- a/tex/context/base/node-mig.lua
+++ b/tex/context/base/node-mig.lua
@@ -19,9 +19,9 @@ local remove_nodes = nodes.remove
local migrated = attributes.private("migrated")
-local trace_migrations = false
+local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
-trackers.register("nodes.migrations", function(v) trace_migrations = v end)
+local report_nodes = logs.new("nodes")
local migrate_inserts, migrate_marks
@@ -82,7 +82,7 @@ function nodes.migrate_outwards(head,where)
if first then
t_inserts, t_marks = t_inserts + ni, t_marks + nm
if trace_migrations and (ni > 0 or nm > 0) then
- logs.report("nodes","sweep %s, %s inserts and %s marks migrated outwards",t_sweeps,ni,nm)
+ report_nodes("sweep %s, %s inserts and %s marks migrated outwards",t_sweeps,ni,nm)
end
-- inserts after head
local n = current.next
diff --git a/tex/context/base/node-par.lua b/tex/context/base/node-par.lua
index 7be7e7917..b5140f7a1 100644
--- a/tex/context/base/node-par.lua
+++ b/tex/context/base/node-par.lua
@@ -15,6 +15,8 @@ parbuilders.attribute = attributes.numbers['parbuilder'] or 999
storage.register("parbuilders.names", parbuilders.names, "parbuilders.names")
storage.register("parbuilders.numbers", parbuilders.numbers, "parbuilders.numbers")
+local report_parbuilders = logs.new("parbuilders")
+
local constructors, names, numbers, p_attribute = parbuilders.constructors, parbuilders.names, parbuilders.numbers, parbuilders.attribute
local has_attribute = node.has_attribute
@@ -49,7 +51,7 @@ function parbuilders.constructor(head,followed_by_display)
if handler then
return handler(head,followed_by_display)
else
- logs.report("parbuilders","handler '%s' is not defined",tostring(constructor))
+ report_parbuilders("handler '%s' is not defined",tostring(constructor))
return true -- let tex break
end
end
diff --git a/tex/context/base/node-pro.lua b/tex/context/base/node-pro.lua
index 4f5b3dcbe..c7c02b414 100644
--- a/tex/context/base/node-pro.lua
+++ b/tex/context/base/node-pro.lua
@@ -7,10 +7,13 @@ if not modules then modules = { } end modules ['node-pro'] = {
}
local utf = unicode.utf8
+local utfchar = utf.char
local format, concat = string.format, table.concat
local trace_callbacks = false trackers.register("nodes.callbacks", function(v) trace_callbacks = v end)
+local report_nodes = logs.new("nodes")
+
local glyph = node.id('glyph')
local free_node = node.free
@@ -35,7 +38,7 @@ local function reconstruct(head)
while h do
local id = h.id
if id == glyph then
- t[#t+1] = utf.char(h.char)
+ t[#t+1] = utfchar(h.char)
else
t[#t+1] = "[]"
end
@@ -52,12 +55,14 @@ local function tracer(what,state,head,groupcode,before,after,show)
end
n = n + 1
if show then
- logs.report("nodes","%s %s: %s, group: %s, nodes: %s -> %s, string: %s",what,n,state,groupcode,before,after,reconstruct(head))
+ report_nodes("%s %s: %s, group: %s, nodes: %s -> %s, string: %s",what,n,state,groupcode,before,after,reconstruct(head))
else
- logs.report("nodes","%s %s: %s, group: %s, nodes: %s -> %s",what,n,state,groupcode,before,after)
+ report_nodes("%s %s: %s, group: %s, nodes: %s -> %s",what,n,state,groupcode,before,after)
end
end
+nodes.processors.tracer = tracer
+
nodes.processors.enabled = true -- this will become a proper state (like trackers)
function nodes.processors.pre_linebreak_filter(head,groupcode,size,packtype,direction)
diff --git a/tex/context/base/node-ref.lua b/tex/context/base/node-ref.lua
index 7128b1a6d..e85b50910 100644
--- a/tex/context/base/node-ref.lua
+++ b/tex/context/base/node-ref.lua
@@ -28,6 +28,8 @@ local trace_backend = false trackers.register("nodes.backend", functi
local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end)
local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end)
+local report_backends = logs.new("backends")
+
local hlist = node.id("hlist")
local vlist = node.id("vlist")
local glue = node.id("glue")
@@ -78,14 +80,14 @@ local function inject_range(head,first,last,reference,make,stack,parent,pardir,t
if result and resolved then
if head == first then
if trace_backend then
- logs.report("backend","head: %04i %s %s %s => w=%s, h=%s, d=%s, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
+ report_backends("head: %04i %s %s %s => w=%s, h=%s, d=%s, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
end
result.next = first
first.prev = result
return result, last
else
if trace_backend then
- logs.report("backend","middle: %04i %s %s => w=%s, h=%s, d=%s, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
+ report_backends("middle: %04i %s %s => w=%s, h=%s, d=%s, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
end
local prev = first.prev
if prev then
@@ -156,7 +158,7 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
local result, resolved = make(width,height,depth,reference)
if result and resolved then
if trace_backend then
- logs.report("backend","box: %04i %s %s: w=%s, h=%s, d=%s, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved)
+ report_backends("box: %04i %s %s: w=%s, h=%s, d=%s, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved)
end
if not first then
current.list = result
@@ -336,6 +338,7 @@ nodes.setreference = setreference
local function makereference(width,height,depth,reference)
local sr = stack[reference]
if sr then
+ report_backends("resolving reference attribute %s",reference)
local resolved, ht, dp, set = sr[1], sr[2], sr[3], sr[4]
if ht then
if height < ht then height = ht end
@@ -361,10 +364,10 @@ local function makereference(width,height,depth,reference)
if cleanupreferences then stack[reference] = nil end
return result, resolved
else
- logs.report("backends","unable to resolve reference annotation %s",reference)
+ report_backends("unable to resolve reference annotation %s",reference)
end
else
- logs.report("backends","unable to resolve reference attribute %s",reference)
+ report_backends("unable to resolve reference attribute %s",reference)
end
end
@@ -399,6 +402,7 @@ nodes.setdestination = setdestination
local function makedestination(width,height,depth,reference)
local sr = stack[reference]
if sr then
+ report_backends("resolving destination attribute %s",reference)
local resolved, ht, dp, name, view = sr[1], sr[2], sr[3], sr[4], sr[5]
if ht then
if height < ht then height = ht end
@@ -440,7 +444,7 @@ local function makedestination(width,height,depth,reference)
if cleanupdestinations then stack[reference] = nil end
return result, resolved
else
- logs.report("backends","unable to resolve destination attribute %s",reference)
+ report_backends("unable to resolve destination attribute %s",reference)
end
end
diff --git a/tex/context/base/node-res.lua b/tex/context/base/node-res.lua
index a8ea8745a..21bcae1d4 100644
--- a/tex/context/base/node-res.lua
+++ b/tex/context/base/node-res.lua
@@ -86,6 +86,7 @@ local baselineskip = register_node(new_node("glue",2))
local leftskip = register_node(new_node("glue",8))
local rightskip = register_node(new_node("glue",9))
local temp = register_node(new_node("temp",0))
+local noad = register_node(new_node("noad"))
function nodes.zeroglue(n)
local s = n.spec
@@ -212,6 +213,11 @@ end
function nodes.temp()
return copy_node(temp)
end
+
+function nodes.noad()
+ return copy_node(noad)
+end
+
--[[
<p>At some point we ran into a problem that the glue specification
of the zeropoint dimension was overwritten when adapting a glue spec
@@ -225,33 +231,16 @@ and hide it for the user. And yes, LuaTeX now gives a warning as
well.</p>
]]--
-if tex.luatexversion > 51 then
-
- function nodes.writable_spec(n)
- local spec = n.spec
- if not spec then
- spec = copy_node(glue_spec)
- n.spec = spec
- elseif not spec.writable then
- spec = copy_node(spec)
- n.spec = spec
- end
- return spec
- end
-
-else
-
- function nodes.writable_spec(n)
- local spec = n.spec
- if not spec then
- spec = copy_node(glue_spec)
- else
- spec = copy_node(spec)
- end
+function nodes.writable_spec(n)
+ local spec = n.spec
+ if not spec then
+ spec = copy_node(glue_spec)
+ n.spec = spec
+ elseif not spec.writable then
+ spec = copy_node(spec)
n.spec = spec
- return spec
end
-
+ return spec
end
local cache = { }
@@ -300,3 +289,5 @@ end) -- \topofboxstack
statistics.register("node memory usage", function() -- comes after cleanup !
return status.node_mem_usage
end)
+
+lua.registerfinalizer(nodes.cleanup_reserved)
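
With the version test gone, writable_spec always hands back a glue spec that is safe to modify: it copies the spec when it is absent or flagged read-only. A minimal usage sketch, assuming a ConTeXt MkIV run on a LuaTeX new enough to expose the writable field (the older fallback is exactly what this change removes).

    local g    = node.new("glue")          -- a fresh glue node
    local spec = nodes.writable_spec(g)    -- copied when missing or not writable
    spec.width = 10 * 65536                -- now safe to set: 10pt in scaled points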
diff --git a/tex/context/base/node-rul.lua b/tex/context/base/node-rul.lua
index 9dd89bcda..74a730893 100644
--- a/tex/context/base/node-rul.lua
+++ b/tex/context/base/node-rul.lua
@@ -8,6 +8,8 @@ if not modules then modules = { } end modules ['node-rul'] = {
-- this will go to an auxiliary module
-- beware: rules now have a dir field
+--
+-- todo: make robust for layers ... order matters
local glyph = node.id("glyph")
local disc = node.id("disc")
@@ -45,6 +47,8 @@ end
local trace_ruled = false trackers.register("nodes.ruled", function(v) trace_ruled = v end)
+local report_ruled = logs.new("ruled")
+
local floor = math.floor
local n_tostring, n_tosequence = nodes.ids_tostring, nodes.tosequence
@@ -86,6 +90,9 @@ local checkdir = true
-- we assume {glyphruns} and no funny extra kerning, ok, maybe we need
-- a dummy character as start and end; anyway we only collect glyphs
+--
+-- this one needs to take layers into account (i.e. we need a list of
+-- critical attributes)
local function process_words(attribute,data,flush,head,parent) -- we have hlistdir and local dir
local n = head
@@ -172,8 +179,14 @@ function nodes.rules.define(settings)
texwrite(#data)
end
+local a_viewerlayer = attributes.private("viewerlayer")
+
local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but acceptable for this purpose
-- check for f and l
+if f.id ~= glyph then
+ -- safeguard ... we need to deal with rules and so on (math)
+ return head
+end
local r, m
if true then
f, l = strip_range(f,l)
@@ -181,7 +194,7 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
local w = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,f,l.next)
local method, offset, continue, dy, rulethickness, unit, order, max, ma, ca, ta =
d.method, d.offset, d.continue, d.dy, d.rulethickness, d.unit, d.order, d.max, d.ma, d.ca, d.ta
- local e = dimenfactor(unit,fontdata[f.font])
+ local e = dimenfactor(unit,fontdata[f.font]) -- what if no glyph node
local colorspace = (ma > 0 and ma) or has_attribute(f,a_colorspace) or 1
local color = (ca > 0 and ca) or has_attribute(f,a_color)
local transparency = (ta > 0 and ta) or has_attribute(f,a_transparency)
@@ -200,6 +213,12 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
local ht = (offset+(i-1)*dy+rulethickness)*e - m
local dp = -(offset+(i-1)*dy-rulethickness)*e + m
local r = new_rule(w,ht,dp)
+ local v = has_attribute(f,a_viewerlayer)
+-- quick hack
+if v then
+ set_attribute(r,a_viewerlayer,v)
+end
+--
if color then
set_attribute(r,a_colorspace,colorspace)
set_attribute(r,a_color,color)
@@ -213,11 +232,11 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
insert_after(head,k,r)
l = r
else
- head, _ = insert_before(head,f,r)
+ head = insert_before(head,f,r)
insert_after(head,r,k)
end
if trace_ruled then
- logs.report("ruled", "level: %s, width: %i, height: %i, depth: %i, nodes: %s, text: %s",
+ report_ruled("level: %s, width: %i, height: %i, depth: %i, nodes: %s, text: %s",
level,w,ht,dp,n_tostring(f,l),n_tosequence(f,l,true))
-- level,r.width,r.height,r.depth,n_tostring(f,l),n_tosequence(f,l,true))
end
@@ -240,6 +259,8 @@ end
local trace_shifted = false trackers.register("nodes.shifted", function(v) trace_shifted = v end)
+local report_shifted = logs.new("shifted")
+
local a_shifted = attributes.private('shifted')
nodes.shifts = nodes.shifts or { }
@@ -274,7 +295,7 @@ local function flush_shifted(head,first,last,data,level,parent,strip) -- not tha
local raise = data.dy * dimenfactor(data.unit,fontdata[first.font])
list.shift, list.height, list.depth = raise, height, depth
if trace_shifted then
- logs.report("shifted", "width: %s, nodes: %s, text: %s",width,n_tostring(first,last),n_tosequence(first,last,true))
+ report_shifted("width: %s, nodes: %s, text: %s",width,n_tostring(first,last),n_tosequence(first,last,true))
end
return head
end
diff --git a/tex/context/base/node-spl.lua b/tex/context/base/node-spl.lua
new file mode 100644
index 000000000..d6ecdfa13
--- /dev/null
+++ b/tex/context/base/node-spl.lua
@@ -0,0 +1,589 @@
+if not modules then modules = { } end modules ['node-spl'] = {
+ version = 1.001,
+ comment = "companion to node-spl.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This module is dedicated to the oriental tex project and for
+-- the moment is too experimental to be publicly supported.
+--
+-- We could cache solutions: say that we store the featureset and
+-- all 'words' -> replacement ... so we create a large solution
+-- database (per font)
+--
+-- This module can be optimized by using a dedicated dynamics handler
+-- but I'll only do that when the rest of the code is stable.
+--
+-- Todo: bind setups to paragraph.
+
+local gmatch, concat, format, remove = string.gmatch, table.concat, string.format, table.remove
+local next, tostring, tonumber = next, tostring, tonumber
+local utfchar = utf.char
+local random = math.random
+local variables = interfaces.variables
+
+local trace_split = false trackers.register("parbuilders.solutions.splitters.splitter", function(v) trace_split = v end)
+local trace_optimize = false trackers.register("parbuilders.solutions.splitters.optimizer", function(v) trace_optimize = v end)
+local trace_colors = false trackers.register("parbuilders.solutions.splitters.colors", function(v) trace_colors = v end)
+local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end)
+
+local report_fonts = logs.new("fonts")
+local report_splitter = logs.new("splitter")
+local report_optimizer = logs.new("optimizer")
+
+local glyph = node.id("glyph")
+local glue = node.id("glue")
+local kern = node.id("kern")
+local disc = node.id("disc")
+local hlist = node.id("hlist")
+local whatsit = node.id("whatsit")
+
+local find_node_tail = node.tail or node.slide
+local free_node = node.free
+local free_nodelist = node.flush_list
+local has_attribute = node.has_attribute
+local set_attribute = node.set_attribute
+local new_node = node.new
+local copy_node = node.copy
+local copy_nodelist = node.copy_list
+local traverse_nodes = node.traverse
+local traverse_ids = node.traverse_id
+local protect_nodes = node.protect_glyphs
+local hpack_nodes = node.hpack
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local repack_hlist = nodes.repack_hlist
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+local process_characters = nodes.process_characters
+local inject_kerns = nodes.inject_kerns
+local set_dynamics = fonts.otf.set_dynamics
+local fontdata = fonts.ids
+
+parbuilders.solutions = parbuilders.solutions or { }
+parbuilders.solutions.splitters = parbuilders.solutions.splitters or { }
+
+local splitters = parbuilders.solutions.splitters
+
+local preroll = true
+local variant = "normal"
+local split = attributes.private('splitter')
+local cache = { }
+local solutions = { } -- attribute sets
+local variants = { }
+local max_less = 0
+local max_more = 0
+local criterium = 0
+local randomseed = nil
+local optimize = nil -- set later
+
+function splitters.setup(setups)
+ local method = aux.settings_to_hash(setups.method or "")
+ if method[variables.preroll] then
+ preroll = true
+ else
+ preroll = false
+ end
+ for k, v in next, method do
+ if variants[k] then
+ optimize = variants[k]
+ end
+ end
+ randomseed = tonumber(setups.randomseed)
+ criterium = tonumber(setups.criterium) or criterium
+end
+
+local function convert(featuresets,name,set,what)
+ local list, numbers = set[what], { }
+ if list then
+ local setups = fonts.define.specify.context_setups
+ for i=1,#list do
+ local feature = list[i]
+ local fs = featuresets[feature]
+ local fn = fs and fs.number
+ if not fn then
+ -- fall back on global features
+ fs = setups[feature]
+ fn = fs and fs.number
+ end
+ if fn then
+ numbers[#numbers+1] = fn
+ if trace_goodies or trace_optimize then
+ report_fonts("solution %s of '%s' uses feature '%s' with number %s",i,name,feature,fn)
+ end
+ else
+ report_fonts("solution %s has an invalid feature reference '%s'",i,name,tostring(feature))
+ end
+ end
+ return #numbers > 0 and numbers
+ end
+end
+
+local function initialize(goodies)
+ local solutions = goodies.solutions
+ if solutions then
+ local featuresets = goodies.featuresets
+ local goodiesname = goodies.name
+ if trace_goodies or trace_optimize then
+ report_fonts("checking solutions in '%s'",goodiesname)
+ end
+ for name, set in next, solutions do
+ set.less = convert(featuresets,name,set,"less")
+ set.more = convert(featuresets,name,set,"more")
+ end
+ end
+end
+
+fonts.goodies.register("solutions",initialize)
+
+function splitters.define(name,parameters)
+ local setups = fonts.define.specify.context_setups
+ local settings = aux.settings_to_hash(parameters) -- todo: interfacing
+ local goodies, solution, less, more = settings.goodies, settings.solution, settings.less, settings.more
+ local less_set, more_set
+ local l = less and aux.settings_to_array(less)
+ local m = more and aux.settings_to_array(more)
+ if goodies then
+ goodies = fonts.goodies.get(goodies) -- also in tfmdata
+ if goodies then
+ local featuresets = goodies.featuresets
+ local solution = solution and goodies.solutions[solution]
+ if l and #l > 0 then
+ less_set = convert(featuresets,name,settings,"less") -- take from settings
+ else
+ less_set = solution and solution.less -- take from goodies
+ end
+ if m and #m > 0 then
+ more_set = convert(featuresets,name,settings,"more") -- take from settings
+ else
+ more_set = solution and solution.more -- take from goodies
+ end
+ end
+ else
+ if l then
+ for i=1,#l do
+ local ss = setups[l[i]]
+ if ss then
+ less_set[#less_set+1] = ss.number
+ end
+ end
+ end
+ if m then
+ for i=1,#m do
+ local ss = setups[m[i]]
+ if ss then
+ more_set[#more_set+1] = ss.number
+ end
+ end
+ end
+ end
+ if trace_optimize then
+ report_fonts("defining solutions '%s', less: '%s', more: '%s'",name,concat(less_set or {}," "),concat(more_set or {}," "))
+ end
+ solutions[#solutions+1] = {
+ solution = solution,
+ less = less_set or { },
+ more = more_set or { },
+ settings = settings, -- for tracing
+ }
+--~ print(table.serialize(solutions[#solutions]))
+ tex.write(#solutions)
+end
+
+local user_node_one = nodes.register(new_node("whatsit",44)) user_node_one.user_id, user_node_one.type = 1, 100
+local user_node_two = nodes.register(new_node("whatsit",44)) user_node_two.user_id, user_node_two.type = 2, 100
+local text_node_trt = nodes.register(new_node("whatsit", 7)) text_node_trt.dir = "+TRT"
+
+local fcs = (fonts.color and fonts.color.set) or function() end
+
+local nofwords, noftries, nofadapted, nofkept, nofparagraphs = 0, 0, 0, 0, 0
+
+function splitters.split(head)
+ -- quite fast
+ local current, done, rlmode, start, stop, attribute = head, false, false, nil, nil, 0
+ cache, max_less, max_more = { }, 0, 0
+ local function flush() -- we can move this
+ local font = start.font
+ local last = stop.next
+ local list = last and copy_nodelist(start,last) or copy_nodelist(start)
+ local n = #cache + 1
+ local user_one = copy_node(user_node_one)
+ local user_two = copy_node(user_node_two)
+ user_one.value, user_two.value = n, n
+ head, start = insert_node_before(head,start,user_one)
+ insert_node_after(head,stop,user_two)
+ if rlmode == "TRT" or rlmode == "+TRT" then
+ local dirnode = copy_node(text_node_trt)
+ list.prev = dirnode
+ dirnode.next = list
+ list = dirnode
+ end
+ local c = {
+ original = list,
+ attribute = attribute,
+ direction = rlmode,
+ font = font
+ }
+ if trace_split then
+ report_splitter( "cached %4i: font: %s, attribute: %s, word: %s, direction: %s", n,
+ font, attribute, nodes.list_to_utf(list,true), rlmode)
+ end
+ cache[n] = c
+ local solution = solutions[attribute]
+ local l, m = #solution.less, #solution.more
+ if l > max_less then max_less = l end
+ if m > max_more then max_more = m end
+ start, stop, done = nil, nil, true
+ end
+ while current do
+ local id = current.id
+ if id == glyph and current.subtype < 255 then
+ local a = has_attribute(current,split)
+ if not a then
+ start, stop = nil, nil
+ elseif not start then
+ start, stop, attribute = current, current, a
+ elseif a ~= attribute then
+ start, stop = nil, nil
+ else
+ stop = current
+ end
+ current = current.next
+ elseif id == disc then
+ start, stop, current = nil, nil, current.next
+ elseif id == whatsit then
+ if start then
+ flush()
+ end
+ local subtype = current.subtype
+ if subtype == 7 or subtype == 6 then
+ rlmode = current.dir
+ end
+ current = current.next
+ else
+ if start then
+ flush()
+ end
+ current = current.next
+ end
+ end
+ if start then
+ flush()
+ end
+ nofparagraphs = nofparagraphs + 1
+ nofwords = nofwords + #cache
+ return head, done
+end
+
+local function collect_words(list)
+ local words, word = { }, nil
+ for current in traverse_ids(whatsit,list) do
+ if current.subtype == 44 then
+ local user_id = current.user_id
+ if user_id == 1 then
+ word = { current.value, current, current }
+ words[#words+1] = word
+ elseif user_id == 2 then
+ word[3] = current
+ end
+ end
+ end
+ return words -- check for empty (elsewhere)
+end
+
+-- we could avoid a hpack but hpack is not that slow
+
+local function doit(word,list,best,width,badness,line,set,listdir)
+ local changed = 0
+ local n = word[1]
+ local found = cache[n]
+ if found then
+ local original, attribute, direction = found.original, found.attribute, found.direction
+ local solution = solutions[attribute]
+ local features = solution and solution[set]
+ if features then
+ local featurenumber = features[best] -- not ok probably
+ if featurenumber then
+ noftries = noftries + 1
+ local first = copy_nodelist(original)
+ if not trace_colors then
+ for n in traverse_nodes(first) do -- maybe fast force so no attr needed
+ set_attribute(n,0,featurenumber) -- this forces dynamics
+ end
+ elseif set == "less" then
+ for n in traverse_nodes(first) do
+ fcs(n,"font:isol")
+ set_attribute(n,0,featurenumber)
+ end
+ else
+ for n in traverse_nodes(first) do
+ fcs(n,"font:medi")
+ set_attribute(n,0,featurenumber)
+ end
+ end
+ local font = found.font
+ local dynamics = found.dynamics
+ if not dynamics then -- we cache this
+ dynamics = fontdata[font].shared.dynamics
+ found.dynamics = dynamics
+ end
+ local processors = found[featurenumber]
+ if not processors then -- we cache this too
+ processors = set_dynamics(font,dynamics,featurenumber)
+ found[featurenumber] = processors
+ end
+ for i=1,#processors do -- often more than 1
+ first = processors[i](first,font,featurenumber) -- we can make a special one that already passes the dynamics
+ end
+ first = inject_kerns(first)
+ local h = word[2].next -- head of current word
+ local t = word[3].prev -- tail of current word
+ if first.id == whatsit then
+ local temp = first
+ first = first.next
+ free_node(temp)
+ end
+ local last = find_node_tail(first)
+ -- replace [u]h->t by [u]first->last
+ local next, prev = t.next, h.prev
+ prev.next, first.prev = first, prev
+ if next then
+ last.next, next.prev = next, last
+ end
+ -- check new pack
+ local temp, b = repack_hlist(list,width,'exactly',listdir)
+ if b > badness then
+ if trace_optimize then
+ report_optimizer("line %s, badness before: %s, after: %s, criterium: %s -> quit",line,badness,b,criterium)
+ end
+ -- remove last insert
+ prev.next, h.prev = h, prev
+ if next then
+ t.next, next.prev = next, t
+ else
+ t.next = nil
+ end
+ last.next = nil
+ free_nodelist(first)
+ else
+ if trace_optimize then
+ report_optimizer("line %s, badness before: %s, after: %s, criterium: %s -> continue",line,badness,b,criterium)
+ end
+ -- free old h->t
+ t.next = nil
+ free_nodelist(h)
+ changed, badness = changed + 1, b
+ end
+ if b <= criterium then
+ return true, changed
+ end
+ end
+ end
+ end
+ return false, changed
+end
+
+-- We repeat some code but adding yet another layer of indirection is not
+-- making things better.
+
+variants[variables.normal] = function(words,list,best,width,badness,line,set,listdir)
+ local changed = 0
+ for i=1,#words do
+ local done, c = doit(words[i],list,best,width,badness,line,set,listdir)
+ changed = changed + c
+ if done then
+ break
+ end
+ end
+ if changed > 0 then
+ nofadapted = nofadapted + 1
+ -- todo: get rid of pack when ok because we already have packed and we only need the last b
+ local list, b = repack_hlist(list,width,'exactly',listdir)
+ return list, true, changed, b -- badness
+ else
+ nofkept = nofkept + 1
+ return list, false, 0, badness
+ end
+end
+
+variants[variables.reverse] = function(words,list,best,width,badness,line,set,listdir)
+ local changed = 0
+ for i=#words,1,-1 do
+ local done, c = doit(words[i],list,best,width,badness,line,set,listdir)
+ changed = changed + c
+ if done then
+ break
+ end
+ end
+ if changed > 0 then
+ nofadapted = nofadapted + 1
+ -- todo: get rid of pack when ok because we already have packed and we only need the last b
+ local list, b = repack_hlist(list,width,'exactly',listdir)
+ return list, true, changed, b -- badness
+ else
+ nofkept = nofkept + 1
+ return list, false, 0, badness
+ end
+end
+
+variants[variables.random] = function(words,list,best,width,badness,line,set,listdir)
+ local changed = 0
+ while #words > 0 do
+ local done, c = doit(remove(words,random(1,#words)),list,best,width,badness,line,set,listdir)
+ changed = changed + c
+ if done then
+ break
+ end
+ end
+ if changed > 0 then
+ nofadapted = nofadapted + 1
+ -- todo: get rid of pack when ok because we already have packed and we only need the last b
+ local list, b = repack_hlist(list,width,'exactly',listdir)
+ return list, true, changed, b -- badness
+ else
+ nofkept = nofkept + 1
+ return list, false, 0, badness
+ end
+end
+
+optimize = variants.normal -- the default
+
+local function show_quality(current,what,line)
+ local set = current.glue_set
+ local sign = current.glue_sign
+ local order = current.glue_order
+ local amount = set * ((sign == 2 and -1) or 1)
+ report_optimizer("line %s, %s, amount %s, set %s, sign %s (%s), order %s",line,what,amount,set,sign,how,order)
+end
+
+function splitters.optimize(head)
+ local nc = #cache
+ if nc > 0 then
+ starttiming(splitters)
+ local listdir = nil -- todo ! ! !
+ if randomseed then
+ math.setrandomseedi(randomseed)
+ randomseed = nil
+ end
+ local line = 0
+ local tex_hbadness, tex_hfuzz = tex.hbadness, tex.hfuzz
+ tex.hbadness, tex.hfuzz = 10000, number.maxdimen
+ if trace_optimize then
+ report_optimizer("preroll: %s, variant: %s, preroll criterium: %s, cache size: %s",
+ tostring(preroll),variant,criterium,nc)
+ end
+ for current in traverse_ids(hlist,head) do
+ -- report_splitter("before: [%s] => %s",current.dir,nodes.tosequence(current.list,nil))
+ line = line + 1
+ local sign, dir, list, width = current.glue_sign, current.dir, current.list, current.width
+ local temp, badness = repack_hlist(list,width,'exactly',dir) -- it would be nice if the badness was stored in the node
+ if badness > 0 then
+ if sign == 0 then
+ if trace_optimize then
+ report_optimizer("line %s, badness %s, okay",line,badness)
+ end
+ else
+ local set, max
+ if sign == 1 then
+ if trace_optimize then
+ report_optimizer("line %s, badness %s, underfull, trying more",line,badness)
+ end
+ set, max = "more", max_more
+ else
+ if trace_optimize then
+ report_optimizer("line %s, badness %s, overfull, trying less",line,badness)
+ end
+ set, max = "less", max_less
+ end
+ -- we can keep the best variants
+ local lastbest, lastbadness = nil, badness
+ if preroll then
+ local bb, base
+ for i=1,max do
+ if base then
+ free_nodelist(base)
+ end
+ base = copy_nodelist(list)
+ local words = collect_words(base) -- beware: words is adapted
+ for j=i,max do
+ local temp, done, changes, b = optimize(words,base,j,width,badness,line,set,dir)
+ base = temp
+ if trace_optimize then
+ report_optimizer("line %s, alternative: %s.%s, changes: %s, badness %s",line,i,j,changes,b)
+ end
+ bb = b
+ if b <= criterium then
+ break
+ end
+ -- if done then
+ -- break
+ -- end
+ end
+ if bb and bb > criterium then -- needs checking
+ if not lastbest then
+ lastbest, lastbadness = i, bb
+ elseif bb > lastbadness then
+ lastbest, lastbadness = i, bb
+ end
+ else
+ break
+ end
+ end
+ free_nodelist(base)
+ end
+ local words = collect_words(list)
+ for best=lastbest or 1,max do
+ local temp, done, changes, b = optimize(words,list,best,width,badness,line,set,dir)
+ current.list = temp
+ if trace_optimize then
+ report_optimizer("line %s, alternative: %s, changes: %s, badness %s",line,best,changes,b)
+ end
+ if done then
+ if b <= criterium then -- was == 0
+ protect_nodes(list)
+ break
+ end
+ end
+ end
+ end
+ else
+ if trace_optimize then
+ report_optimizer("line %s, not bad enough",line)
+ end
+ end
+ -- we pack inside the outer hpack and that way keep the original wd/ht/dp as bonus
+ current.list = hpack_nodes(current.list,width,'exactly',listdir)
+ -- report_splitter("after: [%s] => %s",temp.dir,nodes.tosequence(temp.list,nil))
+ end
+ for i=1,nc do
+ local ci = cache[i]
+ free_nodelist(ci.original)
+ end
+ cache = { }
+ tex.hbadness, tex.hfuzz = tex_hbadness, tex_hfuzz
+ stoptiming(splitters)
+ end
+end
+
+statistics.register("optimizer statistics", function()
+ if nofwords > 0 then
+ local elapsed = statistics.elapsedtime(splitters)
+ local average = noftries/elapsed
+ return format("%s words identified in %s paragraphs, %s words retried, %s lines tried, %0.3f seconds used, %s adapted, %0.1f lines per second",
+ nofwords,nofparagraphs,noftries,nofadapted+nofkept,elapsed,nofadapted,average)
+ end
+end)
+
+function splitters.enable()
+ tasks.enableaction("processors", "parbuilders.solutions.splitters.split")
+ tasks.enableaction("finalizers", "parbuilders.solutions.splitters.optimize")
+end
+
+function splitters.disable()
+ tasks.disableaction("processors", "parbuilders.solutions.splitters.split")
+ tasks.disableaction("finalizers", "parbuilders.solutions.splitters.optimize")
+end
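
The macros in node-spl.mkiv (next file) drive this module, but the Lua entry points can be exercised directly. A minimal sketch, assuming a ConTeXt MkIV run with the husayni goodies file installed; the FancyHusayni and experimental names simply echo the example in the .mkiv documentation below.

    local splitters = parbuilders.solutions.splitters

    splitters.setup {
        method     = "normal,preroll",  -- pick the 'normal' variant and enable prerolling
        criterium  = "1",               -- stop optimizing a line once badness <= 1
        randomseed = "101",             -- only consulted by the 'random' variant
    }

    -- define() parses its options and writes the solution number back to TeX,
    -- where the node-spl.mkiv macros store it in the 'splitter' attribute
    splitters.define("FancyHusayni", "goodies=husayni,solution=experimental")

    -- splitters.enable()/disable() switch the split and optimize passes on and off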
diff --git a/tex/context/base/node-spl.mkiv b/tex/context/base/node-spl.mkiv
new file mode 100644
index 000000000..af98f45c7
--- /dev/null
+++ b/tex/context/base/node-spl.mkiv
@@ -0,0 +1,114 @@
+%D \module
+%D [ file=node-spl,
+%D version=2009.05.19,
+%D title=\CONTEXT\ Node Macros,
+%D subtitle=Splitters,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright=PRAGMA]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Node Support / Splitters}
+
+\registerctxluafile{node-spl}{1.001}
+
+\definesystemattribute[splitter] \chardef\splitterattribute \dogetattributeid{splitter}
+
+%D This module is specially made for the oriental \TEX\ project. It works as
+%D follows (and is tuned for fonts like Idris' Husayni). The following method came
+%D to my mind after a couple of Skype sessions with Idris while working on the
+%D rough edges of the Husayni font and playing with font dynamics.
+%D
+%D \startitemize[packed]
+%D
+%D \item We define a couple of feature sets, some of which have stylistic variants
+%D that result in the same words getting a different width. Normally this
+%D happens in a goodies file.
+%D
+%D \item We group such features in a solution set. A solution set can be enabled
+%D by setting an attribute.
+%D
+%D \item For each paragraph we identify words that get this set applied. We replace
+%D these words by a user node that refers to the original.
+%D
+%D \item For each word we apply the features to a copy that we associate with this
+%D original word.
+%D
+%D \item At the end we have a paragraph (node list) with user nodes that point to a
+%D cache that has originals and processed variants.
+%D
+%D \item When the paragraph is broken into lines we optimize the spacing by
+%D substituting variants.
+%D
+%D \stopitemize
+%D
+%D This approach permits us to use a dedicated paragraph builder, one that treats
+%D the user nodes specially and takes the alternatives into account.
+%D
+%D Currently we assume that only one solution is active at a time. Maybe some day
+%D I'll support a mixture. This is only one way of optimizing, and after several
+%D experiments this one was chosen as a test case. It took quite some experiments
+%D (and time) to get this far.
+%D
+%D This is experimental code for the Oriental \TEX\ project and aspects of it might
+%D change.
+%D
+%D \starttyping
+%D \setupfontsolutions[method={random,preroll},criterium=1,randomseed=101]
+%D
+%D \definefontsolution % actually only the last run needs to be done this way
+%D [FancyHusayni]
+%D [goodies=husayni,
+%D solution=experimental]
+%D
+%D \definedfont[husayni*husayni-default at 24pt]
+%D \setupinterlinespace[line=36pt]
+%D \righttoleft
+%D \enabletrackers[parbuilders.solutions.splitters.colors]
+%D \setfontsolution[FancyHusayni]
+%D alb alb alb \par
+%D \resetfontsolution
+%D \disabletrackers[parbuilders.solutions.splitters.colors]
+%D \stoptyping
+
+\unprotect
+
+\newtoks\everysetupfontsolutions
+
+\unexpanded\def\definefontsolution
+ {\dodoubleargument\dodefinefontsolution}
+
+\def\dodefinefontsolution[#1][#2]% we could set the attribute at the lua end
+ {\setxvalue{\??fu:#1}{\attribute\splitterattribute\ctxlua{parbuilders.solutions.splitters.define("#1","#2")}\relax}}
+
+\unexpanded\def\setfontsolution[#1]%
+ {\ctxlua{parbuilders.solutions.splitters.enable()}%
+ \csname\??fu:#1\endcsname}
+
+\unexpanded\def\resetfontsolution
+ {\ctxlua{parbuilders.solutions.splitters.disable()}%
+ \attribute\splitterattribute\attributeunsetvalue}
+
+\letvalue{\??fu:\v!reset}\resetfontsolution
+
+\unexpanded\def\setupfontsolutions[#1]%
+ {\getparameters[\??fu][#1]%
+ \the\everysetupfontsolutions}
+
+\appendtoks
+ \ctxlua{parbuilders.solutions.splitters.setup {
+ method = "\@@fumethod",
+ criterium = "\@@fucriterium",
+ }}%
+\to \everysetupfontsolutions
+
+% We initialize this module at the \LUA\ end.
+%
+% \setupfontsolutions
+% [\c!method={\v!normal,preroll},
+% \c!criterium=0]
+
+\protect
diff --git a/tex/context/base/node-tra.lua b/tex/context/base/node-tra.lua
index 5acd70baf..880b21b81 100644
--- a/tex/context/base/node-tra.lua
+++ b/tex/context/base/node-tra.lua
@@ -16,6 +16,8 @@ local format, match, concat, rep, utfchar = string.format, string.match, table.c
local ctxcatcodes = tex.ctxcatcodes
+local report_nodes = logs.new("nodes")
+
fonts = fonts or { }
fonts.tfm = fonts.tfm or { }
fonts.ids = fonts.ids or { }
@@ -380,13 +382,13 @@ end
function nodes.report(t,done)
if done then
if status.output_active then
- logs.report("nodes","output, changed, %s nodes",nodes.count(t))
+ report_nodes("output, changed, %s nodes",nodes.count(t))
else
texio.write("nodes","normal, changed, %s nodes",nodes.count(t))
end
else
if status.output_active then
- logs.report("nodes","output, unchanged, %s nodes",nodes.count(t))
+ report_nodes("output, unchanged, %s nodes",nodes.count(t))
else
texio.write("nodes","normal, unchanged, %s nodes",nodes.count(t))
end
diff --git a/tex/context/base/node-tsk.lua b/tex/context/base/node-tsk.lua
index 206b4a266..66f691ec8 100644
--- a/tex/context/base/node-tsk.lua
+++ b/tex/context/base/node-tsk.lua
@@ -10,6 +10,8 @@ if not modules then modules = { } end modules ['node-tsk'] = {
local trace_tasks = false trackers.register("tasks.creation", function(v) trace_tasks = v end)
+local report_tasks = logs.new("tasks")
+
tasks = tasks or { }
tasks.data = tasks.data or { }
@@ -87,7 +89,7 @@ end
function tasks.showactions(name,group,action,where,kind)
local data = tasks.data[name]
if data then
- logs.report("nodes","task %s, list:\n%s",name,sequencer.nodeprocessor(data.list))
+ report_tasks("task %s, list:\n%s",name,sequencer.nodeprocessor(data.list))
end
end
@@ -118,7 +120,7 @@ function tasks.actions(name,n) -- we optimize for the number or arguments (no ..
if not runner then
created = created + 1
if trace_tasks then
- logs.report("nodes","creating task runner '%s'",name)
+ report_tasks("creating runner '%s'",name)
end
runner = compile(data.list,nodeprocessor,0)
data.runner = runner
@@ -132,7 +134,7 @@ function tasks.actions(name,n) -- we optimize for the number or arguments (no ..
if not runner then
created = created + 1
if trace_tasks then
- logs.report("nodes","creating task runner '%s' with 1 extra arguments",name)
+ report_tasks("creating runner '%s' with 1 extra arguments",name)
end
runner = compile(data.list,nodeprocessor,1)
data.runner = runner
@@ -146,7 +148,7 @@ function tasks.actions(name,n) -- we optimize for the number or arguments (no ..
if not runner then
created = created + 1
if trace_tasks then
- logs.report("nodes","creating task runner '%s' with 2 extra arguments",name)
+ report_tasks("creating runner '%s' with 2 extra arguments",name)
end
runner = compile(data.list,nodeprocessor,2)
data.runner = runner
@@ -160,7 +162,7 @@ function tasks.actions(name,n) -- we optimize for the number or arguments (no ..
if not runner then
created = created + 1
if trace_tasks then
- logs.report("nodes","creating task runner '%s' with 3 extra arguments",name)
+ report_tasks("creating runner '%s' with 3 extra arguments",name)
end
runner = compile(data.list,nodeprocessor,3)
data.runner = runner
@@ -174,7 +176,7 @@ function tasks.actions(name,n) -- we optimize for the number or arguments (no ..
if not runner then
created = created + 1
if trace_tasks then
- logs.report("nodes","creating task runner '%s' with 4 extra arguments",name)
+ report_tasks("creating runner '%s' with 4 extra arguments",name)
end
runner = compile(data.list,nodeprocessor,4)
data.runner = runner
@@ -188,7 +190,7 @@ function tasks.actions(name,n) -- we optimize for the number or arguments (no ..
if not runner then
created = created + 1
if trace_tasks then
- logs.report("nodes","creating task runner '%s' with 5 extra arguments",name)
+ report_tasks("creating runner '%s' with 5 extra arguments",name)
end
runner = compile(data.list,nodeprocessor,5)
data.runner = runner
@@ -202,7 +204,7 @@ function tasks.actions(name,n) -- we optimize for the number or arguments (no ..
if not runner then
created = created + 1
if trace_tasks then
- logs.report("nodes","creating task runner '%s' with n extra arguments",name)
+ report_tasks("creating runner '%s' with n extra arguments",name)
end
runner = compile(data.list,nodeprocessor,"n")
data.runner = runner
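
tasks.actions compiles one runner per number of extra arguments and caches it in the task data; the messages above fire only the first time a runner is built. A minimal usage sketch, assuming the predefined "processors" task list; the commented call mirrors the pre_linebreak_filter signature shown earlier in this diff.

    local processors = tasks.actions("processors", 4)  -- head plus 4 extra arguments

    -- later, typically from a linebreak callback:
    -- local head, done = processors(head, groupcode, size, packtype, direction)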
diff --git a/tex/context/base/page-flt.lua b/tex/context/base/page-flt.lua
index 74d1e4e8c..b0f332993 100644
--- a/tex/context/base/page-flt.lua
+++ b/tex/context/base/page-flt.lua
@@ -14,6 +14,8 @@ local copy_node_list = node.copy_list
local trace_floats = false trackers.register("graphics.floats", function(v) trace_floats = v end) -- name might change
+local report_floats = logs.new("floats")
+
-- we use floatbox, floatwidth, floatheight
-- text page leftpage rightpage (todo: top, bottom, margin, order)
@@ -83,22 +85,26 @@ end
function floats.save(which,data)
which = which or default
- local stack = stacks[which]
- noffloats = noffloats + 1
local b = texbox.floatbox
- local w, h, d = b.width, b.height, b.depth
- local t = {
- n = noffloats,
- data = data or { },
- box = copy_node_list(b),
- }
- texbox.floatbox = nil
- insert(stack,t)
- setcount("global","savednoffloats",#stacks[default])
- if trace_floats then
- logs.report("floats","saving %s float %s in slot %s (%i,%i,%i)",which,noffloats,#stack,w,h,d)
+ if b then
+ local stack = stacks[which]
+ noffloats = noffloats + 1
+ local w, h, d = b.width, b.height, b.depth
+ local t = {
+ n = noffloats,
+ data = data or { },
+ box = copy_node_list(b),
+ }
+ texbox.floatbox = nil
+ insert(stack,t)
+ setcount("global","savednoffloats",#stacks[default])
+ if trace_floats then
+ report_floats("saving %s float %s in slot %s (%i,%i,%i)",which,noffloats,#stack,w,h,d)
+ else
+ interfaces.showmessage("floatblocks",2,noffloats)
+ end
else
- interfaces.showmessage("floatblocks",2,noffloats)
+ report_floats("unable to save %s float %s (empty)",which,noffloats)
end
end
@@ -113,12 +119,12 @@ function floats.resave(which)
insert(stack,1,last)
setcount("global","savednoffloats",#stacks[default])
if trace_floats then
- logs.report("floats","resaving %s float %s in slot %s (%i,%i,%i)",which,noffloats,#stack,w,h,d)
+ report_floats("resaving %s float %s in slot %s (%i,%i,%i)",which,noffloats,#stack,w,h,d)
else
interfaces.showmessage("floatblocks",2,noffloats)
end
else
- logs.report("floats","unable to resave float")
+ report_floats("unable to resave float")
end
end
@@ -129,14 +135,14 @@ function floats.flush(which,n)
if t then
local w, h, d = setdimensions(b)
if trace_floats then
- logs.report("floats","flushing %s float %s from slot %s (%i,%i,%i)",which,t.n,n,w,h,d)
+ report_floats("flushing %s float %s from slot %s (%i,%i,%i)",which,t.n,n,w,h,d)
else
interfaces.showmessage("floatblocks",3,t.n)
end
texbox.floatbox = b
last = remove(stack,n)
last.box = nil
- setcount("global","savednoffloats",#stacks[default])
+ setcount("global","savednoffloats",#stacks[default]) -- default?
else
setdimensions()
end
@@ -156,12 +162,12 @@ function floats.consult(which,n)
if t then
local w, h, d = setdimensions(b)
if trace_floats then
- logs.report("floats","consulting %s float %s in slot %s (%i,%i,%i)",which,t.n,n,w,h,d)
+ report_floats("consulting %s float %s in slot %s (%i,%i,%i)",which,t.n,n,w,h,d)
end
return t, b, n
else
if trace_floats then
- logs.report("floats","nothing to consult")
+ report_floats("nothing to consult")
end
setdimensions()
end
diff --git a/tex/context/base/page-flt.mkiv b/tex/context/base/page-flt.mkiv
index 944626b8e..370b40e89 100644
--- a/tex/context/base/page-flt.mkiv
+++ b/tex/context/base/page-flt.mkiv
@@ -230,4 +230,20 @@
\ifdefined\doflushfloats\else \let\doflushfloats\relax \fi
\ifdefined\flushfloatbox\else \let\flushfloatbox\relax \fi
+% temp hack, needed to prevent the floatbox from being forgotten during
+% output; this will change to using another box for flushing
+%
+% \dorecurse{700}{text } \placefigure[top][]{First} {\framed{bla 1}}
+% \placefigure[top][]{Second}{\framed{bla 2}}
+% \dorecurse {40}{text } \placefigure[top][]{Third} {\framed{bla 3}}
+
+\newbox\savedfloatbox
+
+\appendtoks
+ \global\setbox\savedfloatbox\box\floatbox
+\to \everybeforeoutput
+\appendtoks
+ \global\setbox\floatbox\box\savedfloatbox
+\to \everyafteroutput
+
\protect \endinput
diff --git a/tex/context/base/page-lin.lua b/tex/context/base/page-lin.lua
index 1f2c96251..e11730eae 100644
--- a/tex/context/base/page-lin.lua
+++ b/tex/context/base/page-lin.lua
@@ -10,6 +10,8 @@ if not modules then modules = { } end modules ['page-lin'] = {
local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
+local report_lines = logs.new("lines")
+
local format = string.format
local texsprint, texwrite, texbox = tex.sprint, tex.write, tex.box
@@ -120,7 +122,7 @@ function nodes.lines.boxed.register(configuration)
last = last + 1
data[last] = configuration
if trace_numbers then
- logs.report("lines","registering setup %s",last)
+ report_lines("registering setup %s",last)
end
return last
end
@@ -129,14 +131,14 @@ function nodes.lines.boxed.setup(n,configuration)
local d = data[n]
if d then
if trace_numbers then
- logs.report("lines","updating setup %s",n)
+ report_lines("updating setup %s",n)
end
for k,v in next, configuration do
d[k] = v
end
else
if trace_numbers then
- logs.report("lines","registering setup %s (br)",n)
+ report_lines("registering setup %s (br)",n)
end
data[n] = configuration
end
@@ -154,7 +156,7 @@ local function check_number(n,a,skip) -- move inline
local tag = d.tag or ""
texsprint(ctxcatcodes, format("\\makenumber{%s}{%s}{%s}{%s}{%s}{%s}\\endgraf",tag,s,n.shift,n.width,the_left_margin(n.list),n.dir))
if trace_numbers then
- logs.report("numbers","making number %s for setup %s: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("making number %s for setup %s: %s (%s)",#current_list,a,s,d.continue or "no")
end
else
texsprint(ctxcatcodes, "\\skipnumber\\endgraf")
diff --git a/tex/context/base/page-mul.mkiv b/tex/context/base/page-mul.mkiv
index 88ec7a5e7..c64fd0c64 100644
--- a/tex/context/base/page-mul.mkiv
+++ b/tex/context/base/page-mul.mkiv
@@ -1415,7 +1415,6 @@
%D When handling lots of (small) floats spacing can get worse
%D because of lining out the columns.
-
\def\doflushcolumnfloats
{\ifpostponecolumnfloats\else
\bgroup
diff --git a/tex/context/base/page-str.lua b/tex/context/base/page-str.lua
index c4d1957c3..234e5424f 100644
--- a/tex/context/base/page-str.lua
+++ b/tex/context/base/page-str.lua
@@ -20,6 +20,8 @@ local new_glyph = nodes.glyph
local trace_collecting = false trackers.register("streams.collecting", function(v) trace_collecting = v end)
local trace_flushing = false trackers.register("streams.flushing", function(v) trace_flushing = v end)
+local report_streams = logs.new("streams")
+
streams = streams or { }
local data, name, stack = { }, nil, { }
@@ -63,7 +65,7 @@ function streams.collect(head,where)
dana[1] = head
end
if trace_collecting then
- logs.report("streams","appending snippet '%s' to slot %s",name,#dana)
+ report_streams("appending snippet '%s' to slot %s",name,#dana)
end
return nil, true
else
@@ -80,7 +82,7 @@ function streams.push(thename)
if dana then
dana[#dana+1] = false
if trace_collecting then
- logs.report("streams","pushing snippet '%s'",thename)
+ report_streams("pushing snippet '%s'",thename)
end
end
end
@@ -94,7 +96,7 @@ function streams.flush(name,copy) -- problem: we need to migrate afterwards
-- nothing to flush
elseif copy then
if trace_flushing then
- logs.report("streams","flushing copies of %s slots of '%s'",dn,name)
+ report_streams("flushing copies of %s slots of '%s'",dn,name)
end
for i=1,dn do
local di = dana[i]
@@ -107,7 +109,7 @@ function streams.flush(name,copy) -- problem: we need to migrate afterwards
end
else
if trace_flushing then
- logs.report("streams","flushing %s slots of '%s'",dn,name)
+ report_streams("flushing %s slots of '%s'",dn,name)
end
for i=1,dn do
local di = dana[i]
@@ -126,7 +128,7 @@ function streams.synchronize(list) -- this is an experiment !
list = aux.settings_to_array(list)
local max = 0
if trace_flushing then
- logs.report("streams","synchronizing list: %s",concat(list," "))
+ report_streams("synchronizing list: %s",concat(list," "))
end
for i=1,#list do
local dana = data[list[i]]
@@ -138,7 +140,7 @@ function streams.synchronize(list) -- this is an experiment !
end
end
if trace_flushing then
- logs.report("streams","maximum number of slots: %s",max)
+ report_streams("maximum number of slots: %s",max)
end
for m=1,max do
local height, depth = 0, 0
@@ -157,12 +159,12 @@ function streams.synchronize(list) -- this is an experiment !
end
dana[m] = vbox
if trace_flushing then
- logs.report("streams","slot %s of '%s' is packed to height %s and depth %s",m,name,ht,dp)
+ report_streams("slot %s of '%s' is packed to height %s and depth %s",m,name,ht,dp)
end
end
end
if trace_flushing then
- logs.report("streams","slot %s has max height %s and max depth %s",m,height,depth)
+ report_streams("slot %s has max height %s and max depth %s",m,height,depth)
end
local strutht, strutdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth
local struthtdp = strutht + strutdp
@@ -178,7 +180,7 @@ function streams.synchronize(list) -- this is an experiment !
-- actually we need to add glue and repack
vbox.height, vbox.depth = height, depth
if trace_flushing then
- logs.report("streams","slot %s of '%s' with delta (%s,%s) is compensated",m,i,delta_height,delta_depth)
+ report_streams("slot %s of '%s' with delta (%s,%s) is compensated",m,i,delta_height,delta_depth)
end
else
-- this is not yet ok as we also need to keep an eye on vertical spacing
@@ -199,7 +201,7 @@ function streams.synchronize(list) -- this is an experiment !
vbox.list = nil
free_node(vbox)
if trace_flushing then
- logs.report("streams","slot %s:%s with delta (%s,%s) is compensated by %s lines",m,i,delta_height,delta_depth,n)
+ report_streams("slot %s:%s with delta (%s,%s) is compensated by %s lines",m,i,delta_height,delta_depth,n)
end
end
end
diff --git a/tex/context/base/page-str.mkii b/tex/context/base/page-str.mkii
index cfaebe398..71d76484e 100644
--- a/tex/context/base/page-str.mkii
+++ b/tex/context/base/page-str.mkii
@@ -325,51 +325,6 @@
\box2\vfill\page
\egroup}
- %D Although one can put floats in a stream, it sometimes makes sense
- %D to keep them apart and this is what local floats do.
-
- \def\setuplocalfloats
- {\getparameters[\??lf]}
-
- \setuplocalfloats
- [%before=\blank,
- %after=\blank,
- inbetween=\blank]
-
- \installfloathandler \v!local \somelocalfloat
-
- \initializeboxstack{localfloats}
-
- \newcounter\noflocalfloats
-
- \def\resetlocalfloats
- {\doglobal\newcounter\noflocalfloats
- \initializeboxstack{localfloats}}
-
- \def\somelocalfloat[#1]%
- {\doglobal\increment\noflocalfloats
- \savebox{localfloats}{\noflocalfloats}{\box\floatbox}}
-
- \def\getlocalfloats
- {\dorecurse\noflocalfloats
- {\ifnum\recurselevel=\plusone % 1\relax
- \getvalue{\??lf\c!before}%
- \else
- \getvalue{\??lf\c!inbetween}%
- \fi
- \dontleavehmode\hbox{\foundbox{localfloats}\recurselevel}%
- \ifnum\recurselevel=\noflocalfloats\relax
- \getvalue{\??lf\c!after}%
- \fi}}
-
- \def\flushlocalfloats
- {\getlocalfloats
- \resetlocalfloats}
-
- \def\getlocalfloat#1{\expanded{\foundbox{localfloats}{\number#1}}}
-
- \def\forcelocalfloats{\let\forcedfloatmethod\v!local}
-
%D Because many arrangements are possible, we will implement
%D some examples in a runtime loadable module \type {m-streams}.
diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex
index a55eb95f1..b233eeee2 100644
--- a/tex/context/base/s-abr-01.tex
+++ b/tex/context/base/s-abr-01.tex
@@ -287,33 +287,36 @@
\def\SystemSpecialA#1{$\langle\it#1\rangle$}
\def\SystemSpecialB#1{{\tttf<#1>}}
-\def\CATCODE {\SystemSpecialA{catcode}}
-\def\CATCODES {\SystemSpecialA{catcodes}}
-\def\DIMENSION {\SystemSpecialA{dimension}}
-\def\DIMENSIONS {\SystemSpecialA{dimensions}}
-\def\COUNTER {\SystemSpecialA{counter}}
-\def\COUNTERS {\SystemSpecialA{counters}}
-\def\HBOX {\SystemSpecialA{hbox}}
-\def\HBOXES {\SystemSpecialA{hboxes}}
-\def\VBOX {\SystemSpecialA{vbox}}
-\def\VBOXES {\SystemSpecialA{vboxes}}
-\def\BOX {\SystemSpecialA{box}}
-\def\BOXES {\SystemSpecialA{boxes}}
-\def\TOKENLIST {\SystemSpecialA{token list}}
-\def\TOKENLISTS {\SystemSpecialA{token lists}}
-\def\NEWLINE {\SystemSpecialA{newline}}
-\def\SKIP {\SystemSpecialA{skip}}
-\def\SKIPS {\SystemSpecialA{skips}}
-\def\MUSKIP {\SystemSpecialA{muskip}}
-\def\MUSKIPS {\SystemSpecialA{muskips}}
-\def\MARK {\SystemSpecialA{mark}}
-\def\MARKS {\SystemSpecialA{marks}}
+\unexpanded\def\CATCODE {\SystemSpecialA{catcode}}
+\unexpanded\def\CATCODES {\SystemSpecialA{catcodes}}
+\unexpanded\def\DIMENSION {\SystemSpecialA{dimension}}
+\unexpanded\def\DIMENSIONS {\SystemSpecialA{dimensions}}
+\unexpanded\def\COUNTER {\SystemSpecialA{counter}}
+\unexpanded\def\COUNTERS {\SystemSpecialA{counters}}
+\unexpanded\def\HBOX {\SystemSpecialA{hbox}}
+\unexpanded\def\HBOXES {\SystemSpecialA{hboxes}}
+\unexpanded\def\VBOX {\SystemSpecialA{vbox}}
+\unexpanded\def\VBOXES {\SystemSpecialA{vboxes}}
+\unexpanded\def\BOX {\SystemSpecialA{box}}
+\unexpanded\def\BOXES {\SystemSpecialA{boxes}}
+\unexpanded\def\TOKENLIST {\SystemSpecialA{token list}}
+\unexpanded\def\TOKENLISTS {\SystemSpecialA{token lists}}
+\unexpanded\def\NEWLINE {\SystemSpecialA{newline}}
+\unexpanded\def\SKIP {\SystemSpecialA{skip}}
+\unexpanded\def\SKIPS {\SystemSpecialA{skips}}
+\unexpanded\def\MUSKIP {\SystemSpecialA{muskip}}
+\unexpanded\def\MUSKIPS {\SystemSpecialA{muskips}}
+\unexpanded\def\MARK {\SystemSpecialA{mark}}
+\unexpanded\def\MARKS {\SystemSpecialA{marks}}
-\def\SPACE {\SystemSpecialB{space}}
-\def\EOF {\SystemSpecialB{eof}}
-\def\TAB {\SystemSpecialB{tab}}
-\def\NEWPAGE {\SystemSpecialB{newpage}}
-\def\NEWLINE {\SystemSpecialB{newline}}
+\unexpanded\def\SPACE {\SystemSpecialB{space}}
+\unexpanded\def\EOF {\SystemSpecialB{eof}}
+\unexpanded\def\TAB {\SystemSpecialB{tab}}
+\unexpanded\def\NEWPAGE {\SystemSpecialB{newpage}}
+\unexpanded\def\NEWLINE {\SystemSpecialB{newline}}
+
+\unexpanded\def\LUWATEEKH {لُواتيخ} % kh ī t ā w [u] l
+\unexpanded\def\luwateekh {luwātīkh}
\doifmodeelse {mkiv} {
\unexpanded\def\THANH{H\agrave n Th\ecircumflexacute\ Th\agrave nh}
diff --git a/tex/context/base/s-fnt-23.tex b/tex/context/base/s-fnt-23.tex
index 096c8fbf5..dedcf06e4 100644
--- a/tex/context/base/s-fnt-23.tex
+++ b/tex/context/base/s-fnt-23.tex
@@ -11,6 +11,8 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% last_data was written wrong so it needs checking
+
\startluacode
local last_data = nil
local format = string.format
@@ -20,7 +22,7 @@
end
function fonts.otf.show_shape(n)
local tfmdata = fonts.ids[font.current()]
- lastdata = tfmdata
+ last_data = tfmdata
local charnum = tonumber(n)
if not charnum then
charnum = tfmdata.unicodes[n]
@@ -197,7 +199,7 @@
end
function fonts.otf.show_all_shapes(start,stop)
local tfmdata = fonts.ids[font.current()]
- lastdata = tfmdata
+ last_data = tfmdata
start, stop = start or "\\startTEXpage\\gobbleoneargument", stop or "\\stopTEXpage"
local unicodes, indices, descriptions = tfmdata.unicodes, tfmdata.indices, tfmdata.descriptions
for _, unicode in next, table.sortedkeys(descriptions) do
@@ -210,7 +212,7 @@
end
end
function fonts.otf.show_shape_field(unicode,name)
- local tfmdata = lastdata or fonts.ids[font.current()]
+ local tfmdata = last_data or fonts.ids[font.current()]
local d = tfmdata.descriptions[unicode]
if d then
if name == "unicode" then
@@ -268,5 +270,6 @@
\startTEXpage \ShowGlyphShape{simplenaskhi}{100bp}{NameMe.1190} \stopTEXpage
\ShowAllGlyphShapes{simplenaskhi}{100bp}
+% \ShowAllGlyphShapes{xits}{100bp}
\stoptext
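
The s-fnt-23.tex fix above is a name mismatch: the buffer declares local last_data, but the functions assigned to lastdata, and in Lua that silently writes a global instead of updating the local. A small sketch of the failure mode, with invented names and data:

    -- sketch only: why assigning to a misspelled name breaks the cache above
    local last_data = nil          -- the intended upvalue (as in s-fnt-23.tex)

    local function remember(t)
        lastdata = t               -- typo: creates a *global* lastdata, the local stays nil
    end

    local function recall()
        return last_data or "nothing cached"
    end

    remember({ name = "demo" })
    print(recall())                -- "nothing cached" before the fix
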
diff --git a/tex/context/base/s-fnt-25.tex b/tex/context/base/s-fnt-25.tex
index a8b398716..132fa65f9 100644
--- a/tex/context/base/s-fnt-25.tex
+++ b/tex/context/base/s-fnt-25.tex
@@ -167,7 +167,7 @@ function document.showmathfont(id,slot)
if cnext then
report("\\mathfontlistentrynextlist{%s}",table.concat(cnext," => "))
end
- if variants then
+ if cvariants then
report("\\mathfontlistentryvariantslist{%s}",table.concat(cvariants," "))
end
end
@@ -179,16 +179,13 @@ end
\endinput
-\startbuffer[mathtest]
- \begingroup\mm\mr\showmathfontcharacters\endgroup
-\stopbuffer
-
\starttext
- \usetypescript[cambria] \setupbodyfont[cambria, 12pt] \getbuffer[mathtest]
- \usetypescript[lmvirtual] \setupbodyfont[lmvirtual,12pt] \getbuffer[mathtest]
- \usetypescript[pxvirtual] \setupbodyfont[pxvirtual,12pt] \getbuffer[mathtest]
- \usetypescript[txvirtual] \setupbodyfont[txvirtual,12pt] \getbuffer[mathtest]
- \usetypescript[palatino] \setupbodyfont[palatino, 10pt] \getbuffer[mathtest]
- \usetypescript[mathtimes] \setupbodyfont[mathtimes,12pt] \getbuffer[mathtest]
+ \setupbodyfont[cambria, 12pt] \showmathfontcharacters
+ \setupbodyfont[lmvirtual,12pt] \showmathfontcharacters
+ \setupbodyfont[pxvirtual,12pt] \showmathfontcharacters
+ \setupbodyfont[txvirtual,12pt] \showmathfontcharacters
+ \setupbodyfont[palatino, 10pt] \showmathfontcharacters
+ \setupbodyfont[mathtimes,12pt] \showmathfontcharacters
+ \setupbodyfont[stix, 12pt] \showmathfontcharacters
\stoptext
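
The one-line change in s-fnt-25.tex makes the guard test the same variable that the branch uses: testing variants while concatenating cvariants can either skip valid data or raise an error inside table.concat when cvariants is nil. A hedged sketch, with made-up values:

    -- sketch of the guard/use mismatch fixed above (data invented)
    local cvariants = nil          -- this character has no variants
    local variants  = { "stale" }  -- unrelated value left over from elsewhere

    if variants then               -- old test: passes although cvariants is nil
        -- table.concat(cvariants, " ") would raise an error here
    end

    if cvariants then              -- new test: the guard matches what is used
        print(table.concat(cvariants, " "))
    end
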
diff --git a/tex/context/base/s-mod-02.tex b/tex/context/base/s-mod-02.tex
index 9dae3ecc0..fac5c23e6 100644
--- a/tex/context/base/s-mod-02.tex
+++ b/tex/context/base/s-mod-02.tex
@@ -37,15 +37,14 @@
\determineregistercharacteristics
[index]
[criterium=section]
- \ifutilitydone
- \pagereference
- [index]
- \placeregister
- [index]
- [balance=yes,
- indicator=no,
- criterium=section]
- \fi}
+ \doifmode{*register}
+ {\pagereference
+ [index]
+ \placeregister
+ [index]
+ [balance=yes,
+ indicator=no,
+ criterium=section]}}
\let\ComposeLists=\relax
diff --git a/tex/context/base/s-pre-50.tex b/tex/context/base/s-pre-50.tex
index 782f6aea1..41ae04821 100644
--- a/tex/context/base/s-pre-50.tex
+++ b/tex/context/base/s-pre-50.tex
@@ -62,7 +62,7 @@
%D Structure and trick.
\def\StartSteps
- {\checkutilities}
+ {\doifnotmode{mkiv}{\checkutilities}}
\def\StopSteps
{\resetcollector[contribution]}
diff --git a/tex/context/base/scrp-cjk.lua b/tex/context/base/scrp-cjk.lua
index 997baaa96..6ac6c5b11 100644
--- a/tex/context/base/scrp-cjk.lua
+++ b/tex/context/base/scrp-cjk.lua
@@ -327,7 +327,7 @@ local function process(head,first,last)
if action then
local font = first.font
if font ~= lastfont then
- lastfont, done = font, true
+ lastfont = font
set_parameters(font,dataset)
end
action(head,first)
@@ -359,7 +359,7 @@ local function process(head,first,last)
previous = "start"
end
end
- if upcoming == stop then
+ if upcoming == last then -- was stop
break
else
first = upcoming
@@ -530,7 +530,7 @@ local function process(head,first,last)
if action then
local font = first.font
if font ~= lastfont then
- lastfont, done = font, true
+ lastfont = font
set_parameters(font,dataset)
end
action(head,first)
@@ -562,7 +562,7 @@ local function process(head,first,last)
previous = "start"
end
end
- if upcoming == stop then
+ if upcoming == last then -- was stop
break
else
first = upcoming
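
Both scrp-cjk.lua hunks change the loop terminator from stop, which is not a parameter of process and therefore nil in that scope, to last, the node that actually bounds the range, and drop the now-unused done flag. A minimal sketch of walking a linked range up to a sentinel; nodes are plain tables here:

    -- sketch: bounding a node walk by `last` instead of an undefined `stop`
    local a = { id = "a" }
    local b = { id = "b" }
    local c = { id = "c" }
    a.next, b.next = b, c

    local first, last = a, c
    while true do
        local upcoming = first.next
        if upcoming == last then   -- was: upcoming == stop, i.e. nil, so the old
            break                  -- loop only ended at the end of the whole list
        else
            first = upcoming
        end
    end
    print(first.id)                -- "b": the walk stops at the range boundary
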
diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua
index b28c297d0..292f1e779 100644
--- a/tex/context/base/scrp-ini.lua
+++ b/tex/context/base/scrp-ini.lua
@@ -9,6 +9,8 @@ if not modules then modules = { } end modules ['scrp-ini'] = {
local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end)
local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end)
+local report_preprocessing = logs.new("preprocessing")
+
local set_attribute = node.set_attribute
local has_attribute = node.has_attribute
local first_character = node.first_character
@@ -257,9 +259,9 @@ end
local function traced_process(head,first,last,process,a)
if start ~= last then
local f, l = first, last
- logs.report("preprocess","before %s: %s",names[a] or "?",nodes.tosequence(f,l))
+ report_preprocessing("before %s: %s",names[a] or "?",nodes.tosequence(f,l))
process(head,first,last)
- logs.report("preprocess","after %s: %s", names[a] or "?",nodes.tosequence(f,l))
+ report_preprocessing("after %s: %s", names[a] or "?",nodes.tosequence(f,l))
end
end
diff --git a/tex/context/base/sort-ini.lua b/tex/context/base/sort-ini.lua
index b745c9aa5..d029f9bef 100644
--- a/tex/context/base/sort-ini.lua
+++ b/tex/context/base/sort-ini.lua
@@ -20,6 +20,8 @@ local next, type, tonumber = next, type, tonumber
local trace_tests = false trackers.register("sorters.tests", function(v) trace_tests = v end)
+local report_sorters = logs.new("sorters")
+
sorters = { }
sorters.comparers = { }
sorters.splitters = { }
@@ -252,7 +254,7 @@ function sorters.sort(entries,cmp)
if trace_tests then
sort(entries,function(a,b)
local r = cmp(a,b)
- logs.report("sorter","%s %s %s",pack(a),(not r and "?") or (r<0 and "<") or (r>0 and ">") or "=",pack(b))
+ report_sorters("%s %s %s",pack(a),(not r and "?") or (r<0 and "<") or (r>0 and ">") or "=",pack(b))
return r == -1
end)
local s
@@ -263,9 +265,9 @@ function sorters.sort(entries,cmp)
first = " "
else
s = first
- logs.report("sorter",">> %s 0x%05X (%s 0x%05X)",first,utfbyte(first),letter,utfbyte(letter))
+ report_sorters(">> %s 0x%05X (%s 0x%05X)",first,utfbyte(first),letter,utfbyte(letter))
end
- logs.report("sorter"," %s",pack(entry))
+ report_sorters(" %s",pack(entry))
end
else
sort(entries,function(a,b)
diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua
index c75eb1baa..e06cb0ded 100644
--- a/tex/context/base/spac-ver.lua
+++ b/tex/context/base/spac-ver.lua
@@ -38,6 +38,11 @@ local trace_page_vspacing = false trackers.register("nodes.page_vspacing",
local trace_collect_vspacing = false trackers.register("nodes.collect_vspacing", function(v) trace_collect_vspacing = v end)
local trace_vspacing = false trackers.register("nodes.vspacing", function(v) trace_vspacing = v end)
local trace_vsnapping = false trackers.register("nodes.vsnapping", function(v) trace_vsnapping = v end)
+local trace_vpacking = false trackers.register("nodes.vpacking", function(v) trace_vpacking = v end)
+
+local report_vspacing = logs.new("vspacing")
+local report_collapser = logs.new("collapser")
+local report_snapper = logs.new("snapper")
local skip_category = attributes.private('skip-category')
local skip_penalty = attributes.private('skip-penalty')
@@ -337,7 +342,7 @@ storage.register("vspacing/data/skip", vspacing.data.skip, "vspacing.data.skip")
do -- todo: interface.variables
local function logger(c,...)
- logs.report("vspacing",concat {...})
+ report_vspacing(concat {...})
texsprint(c,...)
end
@@ -360,7 +365,7 @@ do -- todo: interface.variables
for s in gmatch(str,"([^ ,]+)") do
local amount, keyword, detail = lpegmatch(splitter,s)
if not keyword then
- logs.report("vspacing","unknown directive: %s",s)
+ report_vspacing("unknown directive: %s",s)
else
local mk = map[keyword]
if mk then
@@ -513,13 +518,13 @@ local function show_tracing(head)
for i=1,#trace_list do
local tag, text = unpack(trace_list[i])
if tag == "info" then
- logs.report("collapse",text)
+ report_collapser(text)
else
- logs.report("collapse"," %s: %s",tag,text)
+ report_collapser(" %s: %s",tag,text)
end
end
- logs.report("collapse","before: %s",before)
- logs.report("collapse","after : %s",after)
+ report_collapser("before: %s",before)
+ report_collapser("after : %s",after)
end
end
@@ -582,13 +587,13 @@ function vspacing.snap_box(n,how)
local s = has_attribute(list,snap_method)
if s == 0 then
if trace_vsnapping then
- -- logs.report("snapper", "hlist not snapped, already done")
+ -- report_snapper("hlist not snapped, already done")
end
else
local h, d, ch, cd, lines = snap_hlist(box,sv,box.height,box.depth)
box.height, box.depth = ch, cd
if trace_vsnapping then
- logs.report("snapper", "hlist snapped from (%s,%s) to (%s,%s) using method '%s' (%s) for '%s' (%s lines)",h,d,ch,cd,sv.name,sv.specification,"direct",lines)
+ report_snapper("hlist snapped from (%s,%s) to (%s,%s) using method '%s' (%s) for '%s' (%s lines)",h,d,ch,cd,sv.name,sv.specification,"direct",lines)
end
set_attribute(list,snap_method,0)
end
@@ -609,7 +614,7 @@ local function forced_skip(head,current,width,where,trace)
current = c
end
if trace then
- logs.report("vspacing", "inserting forced skip of %s",width)
+ report_vspacing("inserting forced skip of %s",width)
end
return head, current
end
@@ -629,16 +634,16 @@ local function collapser(head,where,what,trace,snap) -- maybe also pass tail
if penalty_data then
local p = make_penalty_node(penalty_data)
if trace then trace_done("flushed due to " .. why,p) end
- head, _ = insert_node_before(head,current,p)
+ head = insert_node_before(head,current,p)
end
if glue_data then
if force_glue then
if trace then trace_done("flushed due to " .. why,glue_data) end
- head, _ = forced_skip(head,current,glue_data.spec.width,"before",trace)
+ head = forced_skip(head,current,glue_data.spec.width,"before",trace)
free_glue_node(glue_data)
elseif glue_data.spec.writable then
if trace then trace_done("flushed due to " .. why,glue_data) end
- head, _ = insert_node_before(head,current,glue_data)
+ head = insert_node_before(head,current,glue_data)
else
free_glue_node(glue_data)
end
@@ -649,7 +654,7 @@ local function collapser(head,where,what,trace,snap) -- maybe also pass tail
parskip, ignore_parskip, ignore_following, ignore_whitespace = nil, false, false, false
end
if trace_vsnapping then
- logs.report("snapper", "global ht/dp = %s/%s, local ht/dp = %s/%s",
+ report_snapper("global ht/dp = %s/%s, local ht/dp = %s/%s",
texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth,
texdimen.bodyfontstrutheight, texdimen.bodyfontstrutdepth)
end
@@ -662,21 +667,21 @@ local function collapser(head,where,what,trace,snap) -- maybe also pass tail
local s = has_attribute(current,snap_method)
if not s then
-- if trace_vsnapping then
- -- logs.report("snapper", "hlist not snapped")
+ -- report_snapper("hlist not snapped")
-- end
elseif s == 0 then
if trace_vsnapping then
- -- logs.report("snapper", "hlist not snapped, already done")
+ -- report_snapper("hlist not snapped, already done")
end
else
local sv = snapmethods[s]
if sv then
local h, d, ch, cd, lines = snap_hlist(current,sv)
if trace_vsnapping then
- logs.report("snapper", "hlist snapped from (%s,%s) to (%s,%s) using method '%s' (%s) for '%s' (%s lines)",h,d,ch,cd,sv.name,sv.specification,where,lines)
+ report_snapper("hlist snapped from (%s,%s) to (%s,%s) using method '%s' (%s) for '%s' (%s lines)",h,d,ch,cd,sv.name,sv.specification,where,lines)
end
elseif trace_vsnapping then
- logs.report("snapper", "hlist not snapped due to unknown snap specification")
+ report_snapper("hlist not snapped due to unknown snap specification")
end
set_attribute(current,snap_method,0)
end
@@ -694,7 +699,7 @@ local function collapser(head,where,what,trace,snap) -- maybe also pass tail
elseif id == kern then
if snap and trace_vsnapping and current.kern ~= 0 then
--~ current.kern = 0
- logs.report("snapper", "kern of %s (kept)",current.kern)
+ report_snapper("kern of %s (kept)",current.kern)
end
flush("kern")
current = current.next
@@ -836,7 +841,7 @@ local function collapser(head,where,what,trace,snap) -- maybe also pass tail
local spec = writable_spec(current)
spec.width = 0
if trace_vsnapping then
- logs.report("snapper", "lineskip set to zero")
+ report_snapper("lineskip set to zero")
end
end
else
@@ -857,7 +862,7 @@ local function collapser(head,where,what,trace,snap) -- maybe also pass tail
local spec = writable_spec(current)
spec.width = 0
if trace_vsnapping then
- logs.report("snapper", "baselineskip set to zero")
+ report_snapper("baselineskip set to zero")
end
end
else
@@ -895,7 +900,7 @@ local function collapser(head,where,what,trace,snap) -- maybe also pass tail
local sv = snapmethods[s]
local w, cw = snap_topskip(current,sv)
if trace_vsnapping then
- logs.report("snapper", "topskip snapped from %s to %s for '%s'",w,cw,where)
+ report_snapper("topskip snapped from %s to %s for '%s'",w,cw,where)
end
else
if trace then trace_skip("topskip",sc,so,sp,current) end
@@ -932,7 +937,7 @@ local function collapser(head,where,what,trace,snap) -- maybe also pass tail
--
else -- other glue
if snap and trace_vsnapping and current.spec.writable and current.spec.width ~= 0 then
- logs.report("snapper", "%s of %s (kept)",skips[subtype],current.spec.width)
+ report_snapper("%s of %s (kept)",skips[subtype],current.spec.width)
--~ current.spec.width = 0
end
if trace then trace_skip(format("some glue (%s)",subtype),sc,so,sp,current) end
@@ -989,7 +994,7 @@ end
local stackhead, stacktail, stackhack = nil, nil, false
local function report(message,lst)
- logs.report("vspacing",message,count_nodes(lst,true),node_ids_to_string(lst))
+ report_vspacing(message,count_nodes(lst,true),node_ids_to_string(lst))
end
function nodes.handle_page_spacing(newhead,where)
@@ -1144,14 +1149,14 @@ function nodes.builders.vpack_filter(head,groupcode,size,packtype,maxdepth,direc
local done = false
if head then
starttiming(builders)
- if trace_callbacks then
+ if trace_vpacking then
local before = nodes.count(head)
head, done = actions(head,groupcode,size,packtype,maxdepth,direction)
local after = nodes.count(head)
if done then
- tracer("vpack","changed",head,groupcode,before,after,true)
+ nodes.processors.tracer("vpack","changed",head,groupcode,before,after,true)
else
- tracer("vpack","unchanged",head,groupcode,before,after,true)
+ nodes.processors.tracer("vpack","unchanged",head,groupcode,before,after,true)
end
stoptiming(builders)
else
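
spac-ver.lua now gates its vpack tracing on a dedicated nodes.vpacking tracker instead of the shared callback tracing, and calls the tracer through nodes.processors.tracer. The tracker idiom itself is visible at the top of the hunk; a compact sketch of it, with trackers.register stubbed for illustration:

    -- sketch of the tracker idiom used at the top of spac-ver.lua;
    -- ConTeXt provides the real trackers.register
    trackers = trackers or { callbacks = { } }

    function trackers.register(name, fn)        -- assumption: simplified stub
        trackers.callbacks[name] = fn
    end

    local trace_vpacking = false
    trackers.register("nodes.vpacking", function(v) trace_vpacking = v end)

    -- enabling the tracker (normally done via --trackers=nodes.vpacking)
    trackers.callbacks["nodes.vpacking"](true)
    print(trace_vpacking)                       -- true: tracing switched on
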
diff --git a/tex/context/base/strc-def.mkiv b/tex/context/base/strc-def.mkiv
index 77793c7eb..94bc2fb14 100644
--- a/tex/context/base/strc-def.mkiv
+++ b/tex/context/base/strc-def.mkiv
@@ -12,7 +12,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Definitions}
-% \registerctxluafile{strc-def}{1.001}
+%registerctxluafile{strc-def}{1.001}
\unprotect
diff --git a/tex/context/base/strc-doc.lua b/tex/context/base/strc-doc.lua
index 7faf0d5b3..0c5cea64f 100644
--- a/tex/context/base/strc-doc.lua
+++ b/tex/context/base/strc-doc.lua
@@ -24,10 +24,7 @@ local variables = interfaces.variables
local trace_sectioning = false trackers.register("structure.sectioning", function(v) trace_sectioning = v end)
local trace_detail = false trackers.register("structure.detail", function(v) trace_detail = v end)
-local function report(...)
---~ print(...)
- logs.report("sectioning:",...)
-end
+local report_structure = logs.new("structure")
structure = structure or { }
structure.helpers = structure.helpers or { }
@@ -207,7 +204,7 @@ function sections.somelevel(given)
-- normally these are passed as argument but nowadays we provide several
-- interfaces (we need this because we want to be compatible)
if trace_detail then
- logs.report("structure","name '%s', mapped level '%s', old depth '%s', new depth '%s', reset set '%s'",givenname,mappedlevel,olddepth,newdepth,resetset)
+ report_structure("name '%s', mapped level '%s', old depth '%s', new depth '%s', reset set '%s'",givenname,mappedlevel,olddepth,newdepth,resetset)
end
local u = given.userdata
if u then
@@ -223,7 +220,7 @@ function sections.somelevel(given)
for i=olddepth+1,newdepth do
local s = tonumber(sets.get("structure:resets",data.block,resetset,i))
if trace_detail then
- logs.report("structure","new>old (%s>%s), reset set '%s', reset value '%s', current '%s'",olddepth,newdepth,resetset,s or "?",numbers[i] or "?")
+ report_structure("new>old (%s>%s), reset set '%s', reset value '%s', current '%s'",olddepth,newdepth,resetset,s or "?",numbers[i] or "?")
end
if not s or s == 0 then
numbers[i] = numbers[i] or 0
@@ -238,7 +235,7 @@ function sections.somelevel(given)
for i=olddepth,newdepth+1,-1 do
local s = tonumber(sets.get("structure:resets",data.block,resetset,i))
if trace_detail then
- logs.report("structure","new<old (%s<%s), reset set '%s', reset value '%s', current '%s'",olddepth,newdepth,resetset,s or "?",numbers[i] or "?")
+ report_structure("new<old (%s<%s), reset set '%s', reset value '%s', current '%s'",olddepth,newdepth,resetset,s or "?",numbers[i] or "?")
end
if not s or s == 0 then
numbers[i] = numbers[i] or 0
@@ -270,12 +267,12 @@ function sections.somelevel(given)
end
forced[newdepth] = nil
if trace_detail then
- logs.report("structure","old depth '%s', new depth '%s, old n '%s', new n '%s', forced '%s'",olddepth,newdepth,oldn,newn,concat(fd,""))
+ report_structure("old depth '%s', new depth '%s, old n '%s', new n '%s', forced '%s'",olddepth,newdepth,oldn,newn,concat(fd,""))
end
elseif newn then
newn = oldn + 1
if trace_detail then
- logs.report("structure","old depth '%s', new depth '%s, old n '%s', new n '%s', increment",olddepth,newdepth,oldn,newn)
+ report_structure("old depth '%s', new depth '%s, old n '%s', new n '%s', increment",olddepth,newdepth,oldn,newn)
end
else
local s = tonumber(sets.get("structure:resets",data.block,resetset,newdepth))
@@ -287,7 +284,7 @@ function sections.somelevel(given)
newn = s - 1
end
if trace_detail then
- logs.report("structure","old depth '%s', new depth '%s, old n '%s', new n '%s', reset",olddepth,newdepth,oldn,newn)
+ report_structure("old depth '%s', new depth '%s, old n '%s', new n '%s', reset",olddepth,newdepth,oldn,newn)
end
end
numbers[newdepth] = newn
@@ -313,7 +310,7 @@ function sections.somelevel(given)
numberdata.ownnumbers = table.fastcopy(ownnumbers)
end
if trace_detail then
- logs.report("structure","name '%s', numbers '%s', own numbers '%s'",givenname,concat(numberdata.numbers, " "),concat(numberdata.ownnumbers, " "))
+ report_structure("name '%s', numbers '%s', own numbers '%s'",givenname,concat(numberdata.numbers, " "),concat(numberdata.ownnumbers, " "))
end
given.references.section = sections.save(given)
-- given.numberdata = nil
@@ -644,7 +641,7 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
processors.sprint(ctxcatcodes,stopper)
end
else
- -- report("error: no numbers")
+ -- report_structure("error: no numbers")
end
end
end
diff --git a/tex/context/base/strc-doc.mkiv b/tex/context/base/strc-doc.mkiv
index e10efb3f7..b1f18e91b 100644
--- a/tex/context/base/strc-doc.mkiv
+++ b/tex/context/base/strc-doc.mkiv
@@ -38,7 +38,7 @@
[\??ns]
[\c!number=,\c!level=,\c!name=,\c!title=,\c!bookmark=,\c!marking=,\c!list=,\c!label=,\c!coupling=,\c!ownnumber=,
\c!sectionseparatorset=\s!default,\c!sectionconversionset=\s!default,
- \c!sectionstopper=,\c!sectionstarter,\c!sectionsegments=,
+ \c!sectionstopper=,\c!sectionstarter=,\c!sectionsegments=,
\c!sectionresetset=,\c!reference=,
\c!expansion=\v!no,
\c!xmlsetup=,
diff --git a/tex/context/base/strc-flt.mkii b/tex/context/base/strc-flt.mkii
index e64a439ec..62d02aa2a 100644
--- a/tex/context/base/strc-flt.mkii
+++ b/tex/context/base/strc-flt.mkii
@@ -2139,5 +2139,49 @@
\def\somerightpagefloat{\placesomerightpagefloat}
\def\somefacefloat {\placesomefacefloat}
\def\someslotfloat {\placesomeslotfloat}
-
+
+%D Local floats.
+
+\def\setuplocalfloats
+ {\getparameters[\??lf]}
+
+\setuplocalfloats
+ [%\c!before=\blank,
+ %\c!after=\blank,
+ \c!inbetween=\blank]
+
+\installfloathandler \v!local \somelocalfloat
+
+\initializeboxstack{localfloats}
+
+\newcounter\noflocalfloats
+
+\def\resetlocalfloats
+ {\doglobal\newcounter\noflocalfloats
+ \initializeboxstack{localfloats}}
+
+\def\somelocalfloat[#1]%
+ {\doglobal\increment\noflocalfloats
+ \savebox{localfloats}{\noflocalfloats}{\box\floatbox}}
+
+\def\getlocalfloats
+ {\dorecurse\noflocalfloats
+ {\ifnum\recurselevel=\plusone % 1\relax
+ \getvalue{\??lf\c!before}%
+ \else
+ \getvalue{\??lf\c!inbetween}%
+ \fi
+ \dontleavehmode\hbox{\foundbox{localfloats}\recurselevel}%
+ \ifnum\recurselevel=\noflocalfloats\relax
+ \getvalue{\??lf\c!after}%
+ \fi}}
+
+\def\flushlocalfloats
+ {\getlocalfloats
+ \resetlocalfloats}
+
+\def\getlocalfloat#1{\expanded{\foundbox{localfloats}{\number#1}}}
+
+\def\forcelocalfloats{\let\forcedfloatmethod\v!local}
+
\protect \endinput
diff --git a/tex/context/base/strc-flt.mkiv b/tex/context/base/strc-flt.mkiv
index 67023d701..ea52aa82d 100644
--- a/tex/context/base/strc-flt.mkiv
+++ b/tex/context/base/strc-flt.mkiv
@@ -180,6 +180,7 @@
\def\@@bknlines {\floatsharedparameter\c!nlines } % global one
\def\@@bkmargin {\floatsharedparameter\c!margin } % global one
\def\@@bkcache {\floatsharedparameter\c!cache } % global one
+\def\@@bklocation {\floatsharedparameter\c!location }
% float
%
@@ -1960,5 +1961,49 @@
\def\somerightpagefloat{\placesomerightpagefloat}
\def\somefacefloat {\placesomefacefloat}
\def\someslotfloat {\placesomeslotfloat}
-
+
+%D Local floats:
+
+\def\setuplocalfloats
+ {\getparameters[\??lf]}
+
+\setuplocalfloats
+ [%\c!before=\blank,
+ %\c!after=\blank,
+ \c!inbetween=\blank]
+
+\installfloathandler \v!local \somelocalfloat
+
+\initializeboxstack{localfloats}
+
+\newcounter\noflocalfloats
+
+\def\resetlocalfloats
+ {\doglobal\newcounter\noflocalfloats
+ \initializeboxstack{localfloats}}
+
+\def\somelocalfloat[#1]%
+ {\doglobal\increment\noflocalfloats
+ \savebox{localfloats}{\noflocalfloats}{\box\floatbox}}
+
+\def\getlocalfloats
+ {\dorecurse\noflocalfloats
+ {\ifnum\recurselevel=\plusone % 1\relax
+ \getvalue{\??lf\c!before}%
+ \else
+ \getvalue{\??lf\c!inbetween}%
+ \fi
+ \dontleavehmode\hbox{\foundbox{localfloats}\recurselevel}%
+ \ifnum\recurselevel=\noflocalfloats\relax
+ \getvalue{\??lf\c!after}%
+ \fi}}
+
+\def\flushlocalfloats
+ {\getlocalfloats
+ \resetlocalfloats}
+
+\def\getlocalfloat#1{\expanded{\foundbox{localfloats}{\number#1}}}
+
+\def\forcelocalfloats{\let\forcedfloatmethod\v!local}
+
\protect \endinput
diff --git a/tex/context/base/strc-ini.lua b/tex/context/base/strc-ini.lua
index 61c26a20e..36650cd54 100644
--- a/tex/context/base/strc-ini.lua
+++ b/tex/context/base/strc-ini.lua
@@ -29,6 +29,8 @@ local ctxcatcodes, xmlcatcodes, notcatcodes = tex.ctxcatcodes, tex.xmlcatcodes,
local trace_processors = false trackers.register("structure.processors", function(v) trace_processors = v end)
+local report_processors = logs.new("processors")
+
-- move this
commands = commands or { }
@@ -216,7 +218,7 @@ function processors.sprint(catcodes,str,fnc,...)
code = (fnc and fnc(str,...)) or str
end
if trace_processors then
- logs.report("processors","cct: %s, seq: %s",catcodes,code)
+ report_processors("cct: %s, seq: %s",catcodes,code)
end
texsprint(catcodes,code)
end
diff --git a/tex/context/base/strc-itm.mkiv b/tex/context/base/strc-itm.mkiv
index 7207494ed..ab2f09f40 100644
--- a/tex/context/base/strc-itm.mkiv
+++ b/tex/context/base/strc-itm.mkiv
@@ -71,7 +71,7 @@
\def\dohandleitemreference % we will make a decent number helper
{\ifx\currentitemreference \empty \else
\setnextinternalreference
- \ctxlua { jobreferences.setandgetattribute("\s!full", "\referenceprefix","\currentitemreference",
+ \ctxlua {jobreferences.setandgetattribute("\s!full", "\referenceprefix","\currentitemreference",
{
metadata = {
kind = "item",% ?
diff --git a/tex/context/base/strc-lst.lua b/tex/context/base/strc-lst.lua
index fefbe52ce..ea87715c9 100644
--- a/tex/context/base/strc-lst.lua
+++ b/tex/context/base/strc-lst.lua
@@ -19,6 +19,8 @@ local lpegmatch = lpeg.match
local trace_lists = false trackers.register("structure.lists", function(v) trace_lists = v end)
+local report_lists = logs.new("lists")
+
local ctxcatcodes = tex.ctxcatcodes
structure.lists = structure.lists or { }
@@ -38,6 +40,9 @@ lists.tobesaved = lists.tobesaved or { }
lists.enhancers = lists.enhancers or { }
lists.internals = lists.internals or { }
lists.ordered = lists.ordered or { }
+lists.cached = lists.cached or { }
+
+local cached, pushed = lists.cached, { }
local variables = interfaces.variables
local matching_till_depth, number_at_depth = sections.matching_till_depth, sections.number_at_depth
@@ -81,8 +86,6 @@ if job then
job.register('structure.lists.collected', structure.lists.tobesaved, initializer)
end
-local cached, pushed = { }, { }
-
function lists.push(t)
local r = t.references
local i = (r and r.internal) or 0 -- brrr
@@ -117,6 +120,7 @@ function lists.enhance(n)
if enhancer then
enhancer(l)
end
+ return l
end
end
@@ -144,7 +148,7 @@ local function filter_collected(names, criterium, number, collected, nested)
local hash, result, all, detail = { }, { }, not names or names == "" or names == variables.all, nil
names, criterium = gsub(names," ",""), gsub(criterium," ","")
if trace_lists then
- logs.report("lists","filtering names: %s, criterium: %s, number: %s",names,criterium,number or "-")
+ report_lists("filtering names: %s, criterium: %s, number: %s",names,criterium,number or "-")
end
if not all then
for s in gmatch(names,"[^, ]+") do -- sort of settings to hash
@@ -308,9 +312,9 @@ local function filter_collected(names, criterium, number, collected, nested)
end
if trace_lists then
if detail then
- logs.report("lists","criterium: %s, %s, found: %s",criterium,detail,#result)
+ report_lists("criterium: %s, %s, found: %s",criterium,detail,#result)
else
- logs.report("lists","criterium: %s, found: %s",criterium,#result)
+ report_lists("criterium: %s, found: %s",criterium,#result)
end
end
return result
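
Two related changes in strc-lst.lua: the cached table is promoted from a file-local to lists.cached so that other modules (strc-not.lua further down) can index into it, and lists.enhance(n) now returns the enhanced entry. A short sketch of the sharing pattern; the table contents are invented:

    -- sketch: exposing the cache on the module table so other files can reach it
    structure = structure or { }
    structure.lists = structure.lists or { }
    local lists = structure.lists

    lists.cached = lists.cached or { }
    local cached = lists.cached          -- local alias, same table as before

    function lists.enhance(n)            -- simplified: the real code also runs enhancers
        local l = cached[n]
        if l then
            return l                     -- new: callers can use the entry directly
        end
    end

    cached[1] = { references = { } }
    local entry = lists.enhance(1)
    entry.references.symbolpage = 12     -- e.g. what strc-not.lua now does via lists.cached
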
diff --git a/tex/context/base/strc-lst.mkiv b/tex/context/base/strc-lst.mkiv
index 413052882..2a97709b4 100644
--- a/tex/context/base/strc-lst.mkiv
+++ b/tex/context/base/strc-lst.mkiv
@@ -88,6 +88,11 @@
userdata = structure.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
}}}%
\expanded{\ctxlatelua{structure.lists.enhance(\currentlistnumber)}}%
+ % new from here
+ \xdef\currentstructurelistattribute{\ctxlua{tex.write(jobreferences.setinternalreference(nil,nil,\nextinternalreference))}}%
+ \xdef\currentdestinationattribute{\number\lastdestinationattribute}%
+ \begingroup\attribute\destinationattribute\currentdestinationattribute\hbox{}\endgroup % todo
+ % end of new
\endgroup}
\def\structurelistlocation
@@ -130,7 +135,7 @@
\unexpanded\def\placestructurelist#1#2#3% hm ... [][][]
{\ctxlua{structure.lists.process("#1","#2","#3")}}
-\def\analysestructurelist#1#2#3%
+\unexpanded\def\analysestructurelist#1#2#3%
{\ctxlua{structure.lists.analyze("#1","#2","#3")}}
\def\firststructureelementinlist#1%
@@ -141,7 +146,7 @@
\def\@@structurelistprocess{structurelist:process:}
-\def\installstructurelistprocessor#1#2%
+\unexpanded\def\installstructurelistprocessor#1#2%
{\expandafter\def\csname\@@structurelistprocess#1\endcsname{#2}}
\def\usestructurelistprocessor#1%
@@ -158,7 +163,7 @@
% \chapter{Two} \section{First} \section{Second}
% \stoptext
-\def\processlistofstructure#1#2#3% name, method, n
+\unexpanded\def\processlistofstructure#1#2#3% name, method, n
{\ctxlua{structure.lists.pushnesting(#3)}%
\edef\currentlist {#1}%
\edef\currentlistmethod{#2}%
@@ -324,20 +329,19 @@
% writing to lists
-\def\writetolist[#1]{\gobbletwoarguments}
\let\dowritetolist \gobblefourarguments
\let\dodowritetolist\gobblefourarguments
-\def\writebetweenlist[#1]#2%
+\unexpanded\def\writebetweenlist[#1]#2%
{\doif{\namedlistparameter{#1}\c!state}\v!start{\structurelistinject[#1][command][command={#2}]}}
-\def\writedatatolist
+\unexpanded\def\writedatatolist
{\dodoubleargument\dowritedatatolist}
\def\dowritedatatolist[#1][#2]%
{\doif{\namedlistparameter{#1}\c!state}\v!start{\structurelistinject[#1][userdata][#2]}}
-\def\writetolist[#1]#2#3%
+\unexpanded\def\writetolist[#1]#2#3%
{\doif{\namedlistparameter{#1}\c!state}\v!start{\structurelistinject[#1][simple][first={#2},second={#3}]}}
\installstructurelistprocessor{simple}
@@ -362,11 +366,11 @@
\endgroup
\dosetlistmode}
-\def\dosetlistmode % utilitydone will disappear
+\def\dosetlistmode
{\ifcase\structurelistsize\relax
- \utilitydonefalse \resetsystemmode\v!list
+ \resetsystemmode\v!list
\else
- \utilitydonetrue \setsystemmode \v!list
+ \setsystemmode \v!list
\fi}
\unexpanded\def\systemsuppliedchapter {\getvalue{\v!chapter}} % brrr
@@ -379,7 +383,7 @@
\def\docompletelist[#1][#2]%
{\dodocompletelist[#1][#1][#2]}
-\def\completelist
+\unexpanded\def\completelist
{\dodoubleempty\docompletelist}
\def\listelements {} % list of page breaks
@@ -390,7 +394,7 @@
\def\doassigndimen#1#2#3%
{\doifinsetelse{#2}{\v!fit,\v!broad}{#1=#3}{#1=#2}\relax}
-\def\listsymbol[#1]#2%
+\unexpanded\def\listsymbol[#1]#2%
{\begingroup
\edef\currentlist{#1}%
\edef\currentlistnumber{#2}%
@@ -539,8 +543,7 @@
\endgroup
\dontcomplain
%\setfullsectionnumber{\??li\currentlist}% todo
- \dosomelistelement{#1}{#2}{#3}{#4}{#5}{#6}%
- \global\utilitydonetrue} % ?
+ \dosomelistelement{#1}{#2}{#3}{#4}{#5}{#6}}
\def\dodocommandlistelement#1#2#3#4#5#6%
{\doifdefinedelse{\??li#1\c!command}
@@ -829,7 +832,7 @@
\endgroup
\dosetlistmode}
-\def\determinelistcharacteristics
+\unexpanded\def\determinelistcharacteristics
{\dodoubleempty\dodeterminelistcharacteristics}
\def\combinedlistparameter#1{\csname\??ih\currentcombinedlist#1\endcsname}
diff --git a/tex/context/base/strc-mat.mkiv b/tex/context/base/strc-mat.mkiv
index 2064db2c5..3728913cb 100644
--- a/tex/context/base/strc-mat.mkiv
+++ b/tex/context/base/strc-mat.mkiv
@@ -129,11 +129,17 @@
\def\doplacecurrentformulanumber
{\dohandlecurrentformulareferences
- %\currentformulasattribute % todo
- %\currentformulasattribute % todo
- %\currentsubformulaattribute % todo
\labeltexts\currentformula{\doconvertedstructurecounter[\v!formula][]}}
+% \def\theboxdestinationattribute#1{\iflocation\ifx#1\relax\else\ifx#1\empty\else attr \destinationattribute#1\fi\fi\fi}
+% \def\thedestinationattribute #1{\iflocation\ifx#1\relax\else\ifx#1\empty\else \attribute\destinationattribute#1\fi\fi\fi}
+
+\def\theformuladestinationattribute#1%
+ {\iflocation\ifx#1\relax\else\ifx#1\empty\else
+ \attribute\destinationattribute#1%
+ \glet#1\relax
+ \fi\fi\fi}
+
\appendtoks
\glet\currentplaceformulasynchronize \relax
\glet\currentformulassynchronize \relax
@@ -142,6 +148,8 @@
\let\currentformula\empty
\to \everyresetformulas
+% currently we do the number, some day we will do the (sub) formula
+
\def\dohandlecurrentformulareferences
{\ifnum\placeformulanumbermode=\plusthree
\storecurrentformulanumber
@@ -152,6 +160,7 @@
\currentplaceformulaattribute
\currentplaceformulasynchronize
\glet\currentplaceformulasynchronize\relax
+\theformuladestinationattribute\currentplaceformulaattribute
\fi
\ifnum\formulasnumbermode=\plusthree
\storecurrentformulanumber
@@ -162,6 +171,7 @@
\currentformulasattribute
\currentformulassynchronize
\glet\currentformulassynchronize\relax
+\theformuladestinationattribute\currentformulasattribute
\fi
\ifnum\subformulasnumbermode=\plusthree
\currentsubformulassynchronize
@@ -176,17 +186,21 @@
\currentnestedformulaattribute
\currentnestedformulasynchronize
\glet\currentnestedformulasynchronize\relax
+\theformuladestinationattribute\currentnestedformulaattribute
\fi}
+% needs checking ... too many:
+
+\let\currentplaceformulaattribute\relax \let\currentplaceformulasynchronize\relax \let\currentplaceformulanumber\relax
+\let\currentformulaattribute \relax \let\currentformulasynchronize \relax \let\currentformulanumber \relax
+\let\currentsubformulaattribute \relax \let\currentsubformulasynchronize \relax \let\currentsubformulanumber \relax
+\let\currentformulasattribute \relax \let\currentformulassynchronize \relax \let\currentformulasnumber \relax
+
\let\currentformulasreference \empty \let\currentformulassuffix \empty
\let\currentformulareference \empty \let\currentformulasuffix \empty
\let\currentsubformulareference \empty \let\currentsubformulasuffix \empty
\let\currentnestedformulareference\empty \let\currentnestedformulasuffix\empty
-\let\currentformulasynchronize \relax \let\currentformulaattribute \relax
-\let\currentsubformulasynchronize\relax \let\currentsubformulaattribute\relax
-\let\currentformulassynchronize \relax \let\currentformulasattribute \relax
-
\def\dohandleformulanumbering
{\doincrementsubstructurecounter[\v!formula][1]%
\doiftext\currentplaceformulasuffix{\setsubstructurecounterown[\v!formula][2]{\currentplaceformulasuffix}}%
diff --git a/tex/context/base/strc-not.lua b/tex/context/base/strc-not.lua
index be883af57..1e761d657 100644
--- a/tex/context/base/strc-not.lua
+++ b/tex/context/base/strc-not.lua
@@ -14,6 +14,8 @@ local ctxcatcodes = tex.ctxcatcodes
local trace_notes = false trackers.register("structure.notes", function(v) trace_notes = v end)
+local report_notes = logs.new("notes")
+
structure = structure or { }
structure.helpers = structure.helpers or { }
structure.lists = structure.lists or { }
@@ -43,12 +45,12 @@ function notes.store(tag,n)
nd = { }
notedata[tag] = nd
end
- local nnd = #nd+1
+ local nnd = #nd + 1
nd[nnd] = n
local state = notestates[tag]
if state.kind ~= "insert" then
if trace_notes then
- logs.report("notes","storing %s with state %s as %s",tag,state.kind,nnd)
+ report_notes("storing %s with state %s as %s",tag,state.kind,nnd)
end
state.start = state.start or nnd
end
@@ -62,9 +64,11 @@ local function get(tag,n)
nd = nd[n]
if nd then
if trace_notes then
- logs.report("notes","getting %s of %s",n,tag)
+ report_notes("getting note %s of '%s'",n,tag)
end
- return structure.lists.collected[nd]
+ -- is this right?
+ local newdata = structure.lists.collected[nd]
+ return newdata
end
end
end
@@ -92,7 +96,7 @@ function notes.save(tag,newkind)
local state = notestates[tag]
if state and not state.saved then
if trace_notes then
- logs.report("notes","saving state of %s: %s -> %s",tag,state.kind,newkind or state.kind)
+ report_notes("saving state of '%s': %s -> %s",tag,state.kind,newkind or state.kind)
end
state.saved = notedata[tag]
state.savedkind = state.kind
@@ -105,7 +109,7 @@ function notes.restore(tag,forcedstate)
local state = notestates[tag]
if state and state.saved then
if trace_notes then
- logs.report("notes","restoring state of %s: %s -> %s",tag,state.kind,state.savedkind)
+ report_notes("restoring state of '%s': %s -> %s",tag,state.kind,state.savedkind)
end
state.saved = nil
state.kind = forcedstate or state.savedkind
@@ -116,7 +120,7 @@ end
function notes.setstate(tag,newkind)
local state = notestates[tag]
if trace_notes then
- logs.report("notes","setting state of %s from %s to %s",tag,(state and state.kind) or "unset",newkind)
+ report_notes("setting state of '%s' from %s to %s",tag,(state and state.kind) or "unset",newkind)
end
if not state then
state = {
@@ -236,7 +240,7 @@ end
function notes.postpone()
if trace_notes then
- logs.report("notes","postponing all insert notes")
+ report_notes("postponing all insert notes")
end
for tag, state in next, notestates do
if state.kind ~= "store" then
@@ -246,16 +250,21 @@ function notes.postpone()
end
function notes.setsymbolpage(tag,n)
- local nd = get(tag,n)
- if nd then
- nd.metadata.symbolpage = texcount.realpageno
+ local l = notes.listindex(tag,n)
+ local p = texcount.realpageno
+ if trace_notes then
+ report_notes("note %s of '%s' with list index %s gets page %s",n,tag,l,p)
end
+ lists.cached[l].references.symbolpage = p
end
function notes.getsymbolpage(tag,n)
local nd = get(tag,n)
- nd = nd and nd.metadata.symbolpage
- texwrite(nd or 0)
+ local p = nd and nd.references.symbolpage or 0
+ if trace_notes then
+ report_notes("note %s of '%s' has page %s",n,tag,p)
+ end
+ texwrite(p)
end
function notes.getnumberpage(tag,n)
@@ -275,7 +284,7 @@ function notes.flush(tag,whatkind) -- store and postpone
if kind == "postpone" then
if nd and ns then
if trace_notes then
- logs.report("notes","flushing state %s of %s from %s to %s",whatkind,tag,ns,#nd)
+ report_notes("flushing state %s of %s from %s to %s",whatkind,tag,ns,#nd)
end
for i=ns,#nd do
texsprint(ctxcatcodes,format("\\handlenoteinsert{%s}{%s}",tag,i))
@@ -286,7 +295,7 @@ function notes.flush(tag,whatkind) -- store and postpone
elseif kind == "store" then
if nd and ns then
if trace_notes then
- logs.report("notes","flushing state %s of %s from %s to %s",whatkind,tag,ns,#nd)
+ report_notes("flushing state %s of %s from %s to %s",whatkind,tag,ns,#nd)
end
for i=ns,#nd do
texsprint(ctxcatcodes,format("\\handlenoteitself{%s}{%s}",tag,i))
@@ -296,21 +305,21 @@ function notes.flush(tag,whatkind) -- store and postpone
elseif kind == "reset" then
if nd and ns then
if trace_notes then
- logs.report("notes","flushing state %s of %s from %s to %s",whatkind,tag,ns,#nd)
+ report_notes("flushing state %s of %s from %s to %s",whatkind,tag,ns,#nd)
end
end
state.start = nil
elseif trace_notes then
- logs.report("notes","not flushing state %s of %s",whatkind,tag)
+ report_notes("not flushing state %s of %s",whatkind,tag)
end
elseif trace_notes then
- logs.report("notes","not flushing state %s of %s",whatkind,tag)
+ report_notes("not flushing state %s of %s",whatkind,tag)
end
end
function notes.flushpostponed()
if trace_notes then
- logs.report("notes","flushing all postponed notes")
+ report_notes("flushing all postponed notes")
end
for tag, _ in next, notestates do
notes.flush(tag,"postpone")
@@ -319,7 +328,7 @@ end
function notes.resetpostponed()
if trace_notes then
- logs.report("notes","resetting all postponed notes")
+ report_notes("resetting all postponed notes")
end
for tag, state in next, notestates do
if state.kind == "postpone" then
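
The note code now stores the symbol page in the cached list entry's references table (via notes.listindex and lists.cached) and reads it back from references.symbolpage instead of metadata.symbolpage, so writer and reader use the same field. A minimal sketch of that matched pair; notes.listindex is stubbed, and the real reader goes through the collected list from the previous pass, which the sketch collapses:

    -- heavily simplified sketch of the store/read symmetry restored above
    local lists = { cached = { [7] = { references = { } } } }
    local texcount = { realpageno = 3 }              -- stand-in for tex.count

    local notes = { }

    function notes.listindex(tag, n)                 -- assumption: stubbed lookup
        return 7
    end

    function notes.setsymbolpage(tag, n)
        local l = notes.listindex(tag, n)
        lists.cached[l].references.symbolpage = texcount.realpageno
    end

    function notes.getsymbolpage(tag, n)
        local nd = lists.cached[notes.listindex(tag, n)]
        return nd and nd.references.symbolpage or 0  -- same field the setter wrote
    end

    notes.setsymbolpage("footnote", 1)
    print(notes.getsymbolpage("footnote", 1))        -- 3
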
diff --git a/tex/context/base/strc-not.mkiv b/tex/context/base/strc-not.mkiv
index 45e37b276..751776326 100644
--- a/tex/context/base/strc-not.mkiv
+++ b/tex/context/base/strc-not.mkiv
@@ -699,7 +699,7 @@
\domovednote\currentdescription\currentdescriptionnumberentry\v!nextpage\v!previouspage}}
\def\synchronizesomenotesymbol#1#2% called more often than needed
- {\expanded{\noexpand\ctxlatelua{structure.notes.setsymbolpage("#1",#2)}}}
+ {\normalexpanded{\noexpand\ctxlatelua{structure.notes.setsymbolpage("#1",#2)}}}
\def\handlenoteinsert#1#2%
{\begingroup
diff --git a/tex/context/base/strc-num.lua b/tex/context/base/strc-num.lua
index 8165d0786..e32b5e781 100644
--- a/tex/context/base/strc-num.lua
+++ b/tex/context/base/strc-num.lua
@@ -13,6 +13,8 @@ local texsprint, texcount = tex.sprint, tex.count
local trace_counters = false trackers.register("structure.counters", function(v) trace_counters = v end)
+local report_counters = logs.new("counters")
+
structure = structure or { }
structure.helpers = structure.helpers or { }
structure.sections = structure.sections or { }
@@ -108,7 +110,7 @@ local function constructor(t,s,name,i)
end
end
-local enhance = function()
+local function enhance()
for name, cd in next, counterdata do
local data = cd.data
for i=1,#data do
@@ -275,14 +277,14 @@ local function synchronize(name,d)
local dc = d.counter
if dc then
if trace_counters then
- logs.report("counters","setting counter %s with name %s to %s",dc,name,d.number)
+ report_counters("setting counter %s with name %s to %s",dc,name,d.number)
end
tex.setcount("global",dc,d.number)
end
local cs = counterspecials[name]
if cs then
if trace_counters then
- logs.report("counters","invoking special for name %s",name)
+ report_counters("invoking special for name %s",name)
end
cs()
end
@@ -390,10 +392,10 @@ end
function counters.check(level) -- not used (yet)
for name, cd in next, counterdata do
- -- logs.report("counters","%s %s %s",name,cd.level,level)
+ -- report_counters("%s %s %s",name,cd.level,level)
if cd.level == level then
if trace_counters then
- logs.report("counters","resetting %s at level %s",name,level)
+ report_counters("resetting %s at level %s",name,level)
end
counters.reset(name)
end
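
The switch from local enhance = function() to local function enhance() in strc-num.lua is mainly a style normalization; the only behavioral difference is that the sugared form brings the name into scope before the body, so the function could refer to itself. A sketch of that one difference, using an invented recursive example:

    -- sketch: the single semantic difference between the two local-function forms
    local countdown = function(n)
        if n > 0 then
            -- countdown(n - 1) here would look up a *global* countdown (nil)
        end
    end

    local function countdown2(n)     -- sugar for: local countdown2; countdown2 = function ...
        if n > 0 then
            return countdown2(n - 1) -- the name is already in scope, recursion works
        end
        return "done"
    end

    print(countdown2(3))             -- "done"
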
diff --git a/tex/context/base/strc-pag.lua b/tex/context/base/strc-pag.lua
index 261059587..ccaef6d72 100644
--- a/tex/context/base/strc-pag.lua
+++ b/tex/context/base/strc-pag.lua
@@ -13,6 +13,8 @@ local texsprint, texwrite = tex.sprint, tex.write
local trace_pages = false trackers.register("structure.pages", function(v) trace_pages = v end)
+local report_pages = logs.new("pages")
+
structure.pages = structure.pages or { }
local helpers = structure.helpers or { }
@@ -43,7 +45,7 @@ function pages.save(prefixdata,numberdata)
local realpage, userpage = texcount.realpageno, texcount.userpageno
if realpage > 0 then
if trace_pages then
- logs.report("pages","saving page %s.%s",realpage,userpage)
+ report_pages("saving page %s.%s",realpage,userpage)
end
local data = {
number = userpage,
@@ -56,7 +58,7 @@ function pages.save(prefixdata,numberdata)
collected[realpage] = data
end
elseif trace_pages then
- logs.report("pages","not saving page %s.%s",realpage,userpage)
+ report_pages("not saving page %s.%s",realpage,userpage)
end
end
@@ -67,7 +69,7 @@ function structure.counters.specials.userpage()
if t then
t.number = texcount.userpageno
if trace_pages then
- logs.report("pages","forcing pagenumber of realpage %s to %s",r,t.number)
+ report_pages("forcing pagenumber of realpage %s to %s",r,t.number)
end
end
end
@@ -108,7 +110,7 @@ function pages.number(realdata,pagespec)
texsprint(ctxcatcodes,format("\\convertnumber{%s}{%s}",conversion,userpage))
else
if conversionset == "" then conversionset = "default" end
- local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
+ local theconversion = sets.get("structure:conversions",block,conversionset,1,"numbers") -- to be checked: 1
processors.sprint(ctxcatcodes,theconversion,convertnumber,userpage)
end
if stopper ~= "" then
@@ -205,7 +207,7 @@ function helpers.analyse(entry,specification)
if not section then
return entry, false, "no section"
end
- sectiondata = jobsections.collected[references.section]
+ local sectiondata = jobsections.collected[references.section]
if not sectiondata then
return entry, false, "no section data"
end
diff --git a/tex/context/base/strc-pag.mkiv b/tex/context/base/strc-pag.mkiv
index 0f3d7ba3b..0e5f5323e 100644
--- a/tex/context/base/strc-pag.mkiv
+++ b/tex/context/base/strc-pag.mkiv
@@ -75,7 +75,6 @@
% \stopbodymatter
% \stoptext
-
\definestructurecounter[\s!realpage][\c!prefix=\v!no,\c!start=1,\c!prefixsegments=,\s!counter=realpageno]
\definestructurecounter[\s!userpage][\c!prefix=\v!no,\c!start=1,\c!prefixsegments=,\s!counter=userpageno]
\definestructurecounter[\s!subpage] [\c!prefix=\v!no,\c!start=1,\c!prefixsegments=,\s!counter=subpageno]
diff --git a/tex/context/base/strc-ref.lua b/tex/context/base/strc-ref.lua
index 475ab318a..f9b484173 100644
--- a/tex/context/base/strc-ref.lua
+++ b/tex/context/base/strc-ref.lua
@@ -12,6 +12,8 @@ local texsprint, texwrite, texcount, texsetcount = tex.sprint, tex.write, tex.co
local trace_referencing = false trackers.register("structure.referencing", function(v) trace_referencing = v end)
+local report_references = logs.new("references")
+
local ctxcatcodes = tex.ctxcatcodes
local variables = interfaces.variables
local constants = interfaces.constants
@@ -226,7 +228,7 @@ local function register_from_lists(collected,derived)
local t = { kind, i }
for s in gmatch(reference,"%s*([^,]+)") do
if trace_referencing then
- logs.report("referencing","list entry %s provides %s reference '%s' on realpage %s",i,kind,s,realpage)
+ report_references("list entry %s provides %s reference '%s' on realpage %s",i,kind,s,realpage)
end
d[s] = t -- share them
end
@@ -633,7 +635,7 @@ local function resolve(prefix,reference,args,set) -- we start with prefix,refere
set.has_tex = true
end
else
- -- logs.report("references","funny pattern: %s",ri or "?")
+ -- report_references("funny pattern: %s",ri or "?")
end
end
end
@@ -988,17 +990,17 @@ function jobreferences.filter(name,...) -- number page title ...
filter = filter and (filter[name] or filter.unknown or filters.generic[name] or filters.generic.unknown)
if filter then
if trace_referencing then
- logs.report("referencing","name '%s', kind '%s', using dedicated filter",name,kind)
+ report_references("name '%s', kind '%s', using dedicated filter",name,kind)
end
filter(data,name,...)
elseif trace_referencing then
- logs.report("referencing","name '%s', kind '%s', using generic filter",name,kind)
+ report_references("name '%s', kind '%s', using generic filter",name,kind)
end
elseif trace_referencing then
- logs.report("referencing","name '%s', unknown kind",name)
+ report_references("name '%s', unknown kind",name)
end
elseif trace_referencing then
- logs.report("referencing","name '%s', no reference",name)
+ report_references("name '%s', no reference",name)
end
end
diff --git a/tex/context/base/strc-ref.mkiv b/tex/context/base/strc-ref.mkiv
index 8290a1b13..921f5927e 100644
--- a/tex/context/base/strc-ref.mkiv
+++ b/tex/context/base/strc-ref.mkiv
@@ -118,7 +118,14 @@
\let\dofinishpagereference\dofinishfullreference
\let\dofinishuserreference\dofinishfullreference
-\def\dodosetreference#1#2#3#4% kind labels userdata text -> todo: userdata
+\def\dodosetreference
+ {\ifreferencing
+ \expandafter\dododosetreference
+ \else
+ \expandafter\gobblefourarguments
+ \fi}
+
+\def\dododosetreference#1#2#3#4% kind labels userdata text -> todo: userdata
{\ifreferencing
\edef\currentreferencekind{#1}%
\edef\currentreferencelabels{#2}%
@@ -917,8 +924,7 @@
\c!label=, % can be {left}{right}
\c!command=\in,
#2]%
- \setuvalue{#1}%
- {\dontleavehmode\doexecutereferenceformat{#1}}%
+ \setuvalue{#1}{\dontleavehmode\doexecutereferenceformat{#1}}%
\fi}
\def\noexecutelabelreferenceformat#1%
@@ -933,12 +939,21 @@
\def\doexecutereferenceformat#1%
{\gdef\leftofreference {\csname\??rf#1\c!left \endcsname}%
\gdef\rightofreference{\csname\??rf#1\c!right\endcsname}%
- \global\let\textofreference\empty % otherwise ~ added
+ \glet\textofreference\empty % otherwise ~ added
\doifelsevaluenothing{\??rf#1\c!label}\noexecutelabelreferenceformat\doexecutelabelreferenceformat{#1}}
-\let\leftofreference \relax
-\let\rightofreference\relax
-\let\textofreference \relax
+\newtoks\everyresetreferenceformat
+
+\def\resetreferenceformat
+ {\the\everyresetreferenceformat}
+
+\appendtoks
+ \glet\leftofreference \relax
+ \glet\rightofreference\relax
+ \glet\textofreference \relax
+\to \everyresetreferenceformat
+
+\resetreferenceformat
% fails on metafun {\leftofreference#1\ignorespaces#3\removeunwantedspaces\rightofreference}{#2}[#4]%
%
@@ -973,7 +988,7 @@
\unexpanded\def\dospecialin{\let\currentreferencecontent\currentreferencedefault\doinatreference}
\unexpanded\def\dospecialat{\let\currentreferencecontent\currentreferencepage \doinatreference}
-\newtoks\leftreferencetoks
+\newtoks\leftreferencetoks % needs a reset too?
\newtoks\rightreferencetoks
\def\doinatreference
@@ -1022,6 +1037,7 @@
\doifreferencefoundelse{#4}
{\doifelsenothing{#1}\dosymbolreference\dowantedreference{#1}{#2}[#4]}%
{\dounknownreference{#1}{#2}[#4]}%
+ \resetreferenceformat
\endgroup}
\let\dosymbolreference\dowantedreference
diff --git a/tex/context/base/strc-reg.lua b/tex/context/base/strc-reg.lua
index c5b2c9374..c489ff6b5 100644
--- a/tex/context/base/strc-reg.lua
+++ b/tex/context/base/strc-reg.lua
@@ -14,6 +14,8 @@ local lpegmatch = lpeg.match
local trace_registers = false trackers.register("structure.registers", function(v) trace_registers = v end)
+local report_registers = logs.new("registers")
+
local ctxcatcodes = tex.ctxcatcodes
local variables = interfaces.variables
@@ -157,9 +159,9 @@ local function filter_collected(names,criterium,number,collected,prevmode)
end
if trace_registers then
if detail then
- logs.report("registers","criterium: %s, %s, found: %s",criterium,detail,#result)
+ report_registers("criterium: %s, %s, found: %s",criterium,detail,#result)
else
- logs.report("registers","criterium: %s, found: %s",criterium,#result)
+ report_registers("criterium: %s, found: %s",criterium,#result)
end
end
return result
@@ -397,7 +399,7 @@ function jobregisters.finalize(data,options)
local entry, tag = sorters.firstofsplit(v)
if tag ~= lasttag then
if trace_registers then
- logs.report("registers","splitting at %s",tag)
+ report_registers("splitting at %s",tag)
end
d = { }
s = { tag = tag, data = d }
diff --git a/tex/context/base/strc-reg.mkiv b/tex/context/base/strc-reg.mkiv
index fcc37549c..529e8cd1e 100644
--- a/tex/context/base/strc-reg.mkiv
+++ b/tex/context/base/strc-reg.mkiv
@@ -770,8 +770,7 @@
% \setvalue{#1\s!entry }##1{\dosetpageregisterletter{#1}{##1}}}
% \def\dosetlinkregisterentrya#1#2%
-% {\global\utilitydonetrue
-% \c!entryletter
+% {\c!entryletter
% \iflocation
% \getalllistreferences{#1}{#2}%
% % no \endgraf
@@ -864,8 +863,7 @@
% \ifautoregisterhack
% \setvalue{#1\s!page}##1##2##3##4%
% {\doifreglevelelse[##3]
-% {\global\utilitydonetrue
-% \iffirstregisterpage
+% {\iffirstregisterpage
% \@EA\xdef\csname\??id#1\??id\currentregisterentry\endcsname
% {\internallistreference::##4}%
% \else % catches errors in index
@@ -878,8 +876,7 @@
% \else
% \setvalue{#1\s!page}##1##2##3##4%
% {\doifreglevelelse[##3]
-% {\global\utilitydonetrue
-% \iffirstregisterpage
+% {\iffirstregisterpage
% \global\firstregisterpagefalse
% \@EA\xdef\csname\??id#1\??id\currentregisterentry\endcsname
% {\internallistreference::##2-##4}%
@@ -1026,8 +1023,7 @@
% \setvalue{#1\s!entry }##1{\dosetpageregisterletter{#1}{##1}}}
% \def\dosetautoregisterentrya#1#2%
-% {\global\utilitydonetrue
-% \c!entryletter
+% {\c!entryletter
% \iflocation
% \getalllistreferences{#1}{#2}%
% \endgraf\hangindent1em\noindent\c!entryreference
diff --git a/tex/context/base/strc-sec.mkiv b/tex/context/base/strc-sec.mkiv
index 35927d98a..1968cbae9 100644
--- a/tex/context/base/strc-sec.mkiv
+++ b/tex/context/base/strc-sec.mkiv
@@ -354,7 +354,11 @@
\normalexpanded{\noexpand\setmarking[\currentstructureheadcoupling]{\currentstructurelistnumber}}%
\currentstructuresynchronize}
-\unexpanded\def\fullstructureheadnumber{\labeltexts{\structureheadparameter\c!label}{\structurenumber}} % todo
+% \unexpanded\def\fullstructureheadnumber{\labeltexts{\structureheadparameter\c!label}{\structurenumber}} % todo
+
+\unexpanded\def\fullstructureheadnumber
+ {\edef\currentstructureheadlabeltag{\currentstructureblock\c!label}%
+ \labeltexts{\structureheadparameter\currentstructureheadlabeltag}{\structurenumber}}
% \def\fullstructureheadtitle {\structurevariable{titledata.title}} % no catcode!
% \unexpanded\def\fullstructureheadtitle{\structureautocatcodedget{titledata.title}{\structureheadparameter\s!catcodes}}
diff --git a/tex/context/base/supp-fil.lua b/tex/context/base/supp-fil.lua
index 8d69f64a7..e289f530a 100644
--- a/tex/context/base/supp-fil.lua
+++ b/tex/context/base/supp-fil.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['supp-fil'] = {
license = "see context related readme files"
}
+-- This module will be redone !
+
--[[ldx--
<p>It's more convenient to manipulate filenames (paths) in
<l n='lua'/> than in <l n='tex'/>. These methods have counterparts
@@ -17,11 +19,11 @@ local texsprint, texwrite, ctxcatcodes = tex.sprint, tex.write, tex.ctxcatcodes
local trace_modules = false trackers.register("modules.loading", function(v) trace_modules = v end)
+local report_modules = logs.new("modules")
+
support = support or { }
environment = environment or { }
-environment.outputfilename = environment.outputfilename or environment.jobname
-
function support.checkfilename(str) -- "/whatever..." "c:..." "http://..."
commands.chardef("kindoffile",boolean.tonumber(find(str,"^/") or find(str,"[%a]:")))
end
@@ -155,26 +157,26 @@ local prefixes = { "m", "p", "s", "x", "t" }
local suffixes = { "tex", "mkiv" }
local modstatus = { }
-local function usemodule(name,hassheme)
+local function usemodule(name,hasscheme)
local foundname
if hasscheme then
-- no auto suffix as http will return a home page or error page
-- so we only add one if missing
local fullname = file.addsuffix(name,"tex")
if trace_modules then
- logs.report("modules","checking scheme driven file '%s'",fullname)
+ report_modules("checking scheme driven file '%s'",fullname)
end
foundname = resolvers.findtexfile(fullname) or ""
elseif file.extname(name) ~= "" then
if trace_modules then
- logs.report("modules","checking suffix driven file '%s'",name)
+ report_modules("checking suffix driven file '%s'",name)
end
foundname = support.readfilename(name,false,true) or ""
else
for i=1,#suffixes do
local fullname = file.addsuffix(name,suffixes[i])
if trace_modules then
- logs.report("modules","checking suffix driven file '%s'",fullname)
+ report_modules("checking suffix driven file '%s'",fullname)
end
foundname = support.readfilename(fullname,false,true) or ""
if foundname ~= "" then
@@ -184,7 +186,7 @@ local function usemodule(name,hassheme)
end
if foundname ~= "" then
if trace_modules then
- logs.report("modules","loading '%s'",foundname)
+ report_modules("loading '%s'",foundname)
end
context.startreadingfile()
context.input(foundname)
@@ -205,7 +207,7 @@ function support.usemodules(prefix,askedname,truename)
status = status + 1
else
if trace_modules then
- logs.report("modules","locating '%s'",truename)
+ report_modules("locating '%s'",truename)
end
local hasscheme = url.hasscheme(truename)
if hasscheme then
@@ -241,7 +243,7 @@ function support.usemodules(prefix,askedname,truename)
end
if status == 0 then
if trace_modules then
- logs.report("modules","skipping '%s' (not found)",truename)
+ report_modules("skipping '%s' (not found)",truename)
else
interfaces.showmessage("systems",6,askedname)
end
@@ -251,7 +253,7 @@ function support.usemodules(prefix,askedname,truename)
end
else
if trace_modules then
- logs.report("modules","skipping '%s' (already loaded)",truename)
+ report_modules("skipping '%s' (already loaded)",truename)
else
interfaces.showmessage("systems",7,askedname)
end
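
The supp-fil.lua parameter was spelled hassheme while the body tested hasscheme, so the test always read an undefined (nil) name and the scheme-driven branch could never be reached from the argument; the rename lets the caller's flag reach the test. A small sketch of the failure, with hypothetical file names:

    -- sketch of the misspelled-parameter bug fixed above
    local function usemodule_old(name, hassheme)     -- typo in the parameter name
        if hasscheme then                            -- reads a global: always nil here
            return "scheme lookup for " .. name
        end
        return "plain lookup for " .. name
    end

    local function usemodule_new(name, hasscheme)    -- parameter and test now match
        if hasscheme then
            return "scheme lookup for " .. name
        end
        return "plain lookup for " .. name
    end

    print(usemodule_old("http://example.org/m-demo.tex", true)) -- plain lookup (wrong)
    print(usemodule_new("http://example.org/m-demo.tex", true)) -- scheme lookup
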
diff --git a/tex/context/base/supp-num.tex b/tex/context/base/supp-num.tex
index d192ab548..742349753 100644
--- a/tex/context/base/supp-num.tex
+++ b/tex/context/base/supp-num.tex
@@ -236,27 +236,6 @@
%D of a digit. Watch the \type {\mathaxisheight} trickery (this
%D font related register stored the math axis).
-% \def\scandigits#1%
-% {\if#1.\doscandigit1\chardef\skipdigit0\else
-% \if#1,\doscandigit2\chardef\skipdigit0\else
-% \if#1@\hphantom{0}\chardef\skipdigit1\else
-% \if#1_\hphantom{0}\chardef\skipdigit1\else
-% \if#1/\digitsgn{\hphantom{+}}\chardef\skipdigit0\else
-% \if#1-\digitsgn-\chardef\skipdigit0\else
-% \if#1+\digitsgn+\chardef\skipdigit0\else
-% \if#1=\digitsgn\zeroamount\chardef\skipdigit0\else
-% \if#1s\digitsgn{\hphantom{\positive}}\chardef\skipdigit0\else
-% \if#1p\digitsgn\positive\chardef\skipdigit0\else
-% \if#1m\digitsgn\negative\chardef\skipdigit0\else
-% \if#1n\digitsgn\negative\chardef\skipdigit0\else
-% #1\chardef\skipdigit0\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi}
-
-% \def\digitsep#1%
-% {\doscandigit#1\chardef\skipdigit0}
-%
-% \def\digitnop
-% {\hphantom{0}\chardef\skipdigit1}
-
% 0,=
% 0,== second = results in delta(00,=)
% 0,- is invalid, should be =
diff --git a/tex/context/base/supp-ran.lua b/tex/context/base/supp-ran.lua
index fe635fc7f..76e57e933 100644
--- a/tex/context/base/supp-ran.lua
+++ b/tex/context/base/supp-ran.lua
@@ -9,6 +9,8 @@ if not modules then modules = { } end modules ['supp-ran'] = {
-- We cannot ask for the current seed, so we need some messy hack
-- here.
+local report_system = logs.new("system")
+
commands = commands or { }
local random, randomseed, round, seed, last = math.random, math.randomseed, math.round, false, 1
@@ -20,7 +22,7 @@ function math.setrandomseedi(n,comment)
end
n = round(n)
if false then
- logs.report("system","setting random seed to %s (%s)",n,comment or "normal")
+ report_system("setting random seed to %s (%s)",n,comment or "normal")
end
randomseed(n)
last = random(0,1073741823) -- we need an initial value
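The "messy hack" mentioned in the hunk above comes down to doing our own bookkeeping, since Lua's random interface cannot report the current seed or the last drawn value. A minimal standalone sketch of that idea (the helper names below are illustrative, not the actual supp-ran ones):

-- standalone sketch: Lua cannot report the current seed, so remember it ourselves
local random, randomseed, floor = math.random, math.randomseed, math.floor

local seed, last = false, 1

local function setrandomseedi(n)
    n = floor(n + 0.5)            -- randomseed expects an integer-like value
    seed = n
    randomseed(n)
    last = random(0,1073741823)   -- prime the generator and remember the draw
end

local function getrandomseed()
    return seed                   -- the only way to "ask" for it is to have stored it
end

local function getrandomnumber(min,max)
    last = random(min,max)
    return last
end

local function reuserandomnumber()
    return last                   -- the same "random" value again, e.g. on a second pass
end

setrandomseedi(os.time())
print(getrandomseed())
print(getrandomnumber(1,100))
print(reuserandomnumber())        -- identical to the previous line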
diff --git a/tex/context/base/syst-lua.lua b/tex/context/base/syst-lua.lua
index 640282953..6711bf737 100644
--- a/tex/context/base/syst-lua.lua
+++ b/tex/context/base/syst-lua.lua
@@ -14,27 +14,11 @@ local ctxcatcodes = tex.ctxcatcodes
commands = commands or { } cs = commands -- shorter
-function commands.writestatus(a,b,c,...)
- if c then
- texiowrite_nl(format("%-16s: %s\n",a,format(b,c,...)))
- else
- texiowrite_nl(format("%-16s: %s\n",a,b)) -- b can have %'s
- end
-end
-function commands.writedebug(a,b,c,...)
- if c then
- texiowrite_nl(format("%-16s| %s\n",a,format(b,c,...)))
- else
- texiowrite_nl(format("%-16s| %s\n",a,b)) -- b can have %'s
- end
-end
-
-function commands.report(s,t,...)
- commands.writestatus("!"..s,format(t,...))
-end
+function commands.writereport(...) logs.report(...) end -- not that efficient
+function commands.writestatus(...) logs.status(...) end
local function testcase(b)
- if b then -- faster with if than with expression
+ if b then -- looks faster with if than with expression
texsprint(ctxcatcodes,"\\firstoftwoarguments")
else
texsprint(ctxcatcodes,"\\secondoftwoarguments")
@@ -51,6 +35,7 @@ function commands.doif(b)
texsprint(ctxcatcodes,"\\gobbleoneargument")
end
end
+
function commands.doifnot(b)
if b then
texsprint(ctxcatcodes,"\\gobbleoneargument")
diff --git a/tex/context/base/syst-mes.mkiv b/tex/context/base/syst-mes.mkiv
new file mode 100644
index 000000000..310f21040
--- /dev/null
+++ b/tex/context/base/syst-mes.mkiv
@@ -0,0 +1,38 @@
+%D \module
+%D [ file=syst-mes,
+%D version=2010.06.03,
+%D title=\CONTEXT\ System Macros,
+%D subtitle=Messages,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright=PRAGMA]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\chardef\statuswidth=15
+\chardef\statuswrite=16
+
+\newtoks\everywritestring
+
+\def\writedirect {\immediate\write\statuswrite}
+\def\writeline {\writedirect{}}
+\def\writestring#1{\begingroup\the\everywritestring\writedirect{#1}\endgroup}
+\let\writebanner \writestring
+\let\message \normalmessage
+
+\ifx\normalwritestatus\undefined \def\normalwritestatus#1#2{\writedirect{#1 : #2}} \fi
+
+% no xml logging in format generation
+
+\everyjob {% we can redefine at the lua end !
+ \doif {\ctxlua{tex.sprint(logs.get_method())}} {xml} {%
+ \long\def\writebanner #1{\writestring {<m t='banner'>#1</m>}}%
+ \long\def\writestatus#1#2{\writestring {<m t='#1'>#2</m>}}%
+ \long\def\message #1{\normalmessage{<m t='message'>#1</m>}}%
+ \let\normalwritestatus\writestatus
+ }%
+}
+
+\endinput
diff --git a/tex/context/base/task-ini.lua b/tex/context/base/task-ini.lua
index aaa97ec49..20cba27b7 100644
--- a/tex/context/base/task-ini.lua
+++ b/tex/context/base/task-ini.lua
@@ -15,14 +15,15 @@ tasks.appendaction("processors", "normalizers", "fonts.collections.process")
tasks.appendaction("processors", "normalizers", "fonts.checkers.missing") -- disabled
tasks.appendaction("processors", "characters", "chars.handle_mirroring") -- disabled
-tasks.appendaction("processors", "characters", "chars.handle_casing") -- disabled
-tasks.appendaction("processors", "characters", "chars.handle_digits") -- disabled
+tasks.appendaction("processors", "characters", "typesetting.cases.handler") -- disabled
+--~ tasks.appendaction("processors", "characters", "typesetting.digits.handler") -- disabled
tasks.appendaction("processors", "characters", "chars.handle_breakpoints") -- disabled
tasks.appendaction("processors", "characters", "scripts.preprocess")
tasks.appendaction("processors", "words", "kernel.hyphenation") -- always on
tasks.appendaction("processors", "words", "languages.words.check") -- disabled
+tasks.appendaction("processors", "fonts", "parbuilders.solutions.splitters.split") -- experimental
tasks.appendaction("processors", "fonts", "nodes.process_characters") -- maybe todo
tasks.appendaction("processors", "fonts", "nodes.inject_kerns") -- maybe todo
tasks.appendaction("processors", "fonts", "nodes.protect_glyphs", nil, "nohead") -- maybe todo
@@ -33,6 +34,8 @@ tasks.appendaction("processors", "fonts", "nodes.stripping.process")
tasks.appendaction("processors", "lists", "lists.handle_spacing") -- disabled
tasks.appendaction("processors", "lists", "lists.handle_kerning") -- disabled
+tasks.appendaction("processors", "lists", "typesetting.digits.handler") -- disabled
+
tasks.appendaction("shipouts", "normalizers", "nodes.cleanup_page") -- maybe todo
tasks.appendaction("shipouts", "normalizers", "nodes.add_references") -- disabled
tasks.appendaction("shipouts", "normalizers", "nodes.add_destinations") -- disabled
@@ -55,6 +58,7 @@ tasks.appendaction("math", "builders", "noads.mlist_to_hlist")
-- quite experimental
tasks.appendaction("finalizers", "lists", "nodes.repackage_graphicvadjust") -- todo
+tasks.appendaction("finalizers", "fonts", "parbuilders.solutions.splitters.optimize") -- experimental
-- rather new
@@ -65,30 +69,33 @@ tasks.appendaction("vboxbuilders", "normalizers", "nodes.handle_vbox_spacing")
-- speedup: only kick in when used
-tasks.disableaction("processors", "fonts.checkers.missing")
-tasks.disableaction("processors", "chars.handle_breakpoints")
-tasks.disableaction("processors", "chars.handle_casing")
-tasks.disableaction("processors", "chars.handle_digits")
-tasks.disableaction("processors", "chars.handle_mirroring")
-tasks.disableaction("processors", "languages.words.check")
-tasks.disableaction("processors", "lists.handle_spacing")
-tasks.disableaction("processors", "lists.handle_kerning")
-tasks.disableaction("processors", "nodes.stripping.process")
-
-tasks.disableaction("shipouts", "nodes.rules.process")
-tasks.disableaction("shipouts", "nodes.shifts.process")
-tasks.disableaction("shipouts", "shipouts.handle_color")
-tasks.disableaction("shipouts", "shipouts.handle_transparency")
-tasks.disableaction("shipouts", "shipouts.handle_colorintent")
-tasks.disableaction("shipouts", "shipouts.handle_effect")
-tasks.disableaction("shipouts", "shipouts.handle_negative")
-tasks.disableaction("shipouts", "shipouts.handle_viewerlayer")
-
-tasks.disableaction("shipouts", "nodes.add_references")
-tasks.disableaction("shipouts", "nodes.add_destinations")
+tasks.disableaction("processors", "fonts.checkers.missing")
+tasks.disableaction("processors", "chars.handle_breakpoints")
+tasks.disableaction("processors", "typesetting.cases.handler")
+tasks.disableaction("processors", "typesetting.digits.handler")
+tasks.disableaction("processors", "chars.handle_mirroring")
+tasks.disableaction("processors", "languages.words.check")
+tasks.disableaction("processors", "lists.handle_spacing")
+tasks.disableaction("processors", "lists.handle_kerning")
+tasks.disableaction("processors", "nodes.stripping.process")
+
+tasks.disableaction("shipouts", "nodes.rules.process")
+tasks.disableaction("shipouts", "nodes.shifts.process")
+tasks.disableaction("shipouts", "shipouts.handle_color")
+tasks.disableaction("shipouts", "shipouts.handle_transparency")
+tasks.disableaction("shipouts", "shipouts.handle_colorintent")
+tasks.disableaction("shipouts", "shipouts.handle_effect")
+tasks.disableaction("shipouts", "shipouts.handle_negative")
+tasks.disableaction("shipouts", "shipouts.handle_viewerlayer")
+
+tasks.disableaction("shipouts", "nodes.add_references")
+tasks.disableaction("shipouts", "nodes.add_destinations")
tasks.disableaction("mvlbuilders", "nodes.migrate_outwards")
+tasks.disableaction("processors", "parbuilders.solutions.splitters.split")
+tasks.disableaction("finalizers", "parbuilders.solutions.splitters.optimize")
+
callbacks.freeze("find_.*_file", "find file using resolver")
callbacks.freeze("read_.*_file", "read file at once")
callbacks.freeze("open_.*_file", "open file for reading")
diff --git a/tex/context/base/trac-deb.lua b/tex/context/base/trac-deb.lua
index 97753f3e9..51bbb8812 100644
--- a/tex/context/base/trac-deb.lua
+++ b/tex/context/base/trac-deb.lua
@@ -6,182 +6,179 @@ if not modules then modules = { } end modules ['trac-deb'] = {
license = "see context related readme files"
}
-if not lmx then lmx = { } end
-if not lmx.variables then lmx.variables = { } end
+local lpegmatch = lpeg.match
+local format, concat = string.format, table.concat
+local tonumber, tostring = tonumber, tostring
+local texdimen, textoks, texcount = tex.dimen, tex.toks, tex.count
-lmx.htmfile = function(name) return environment.jobname .. "-status.html" end
-lmx.lmxfile = function(name) return resolvers.find_file(name,'tex') end
+local tracers = namespaces.private("tracers")
+
+local report_system = logs.new("system")
-if not tracers then tracers = { } end
-if not tracers.list then tracers.list = { } end
-if not tracers.strings then tracers.strings = { } end
+tracers.lists = { }
+tracers.strings = { }
tracers.strings.undefined = "undefined"
-local splitter = lpeg.splitat(":")
-local lpegmatch = lpeg.match
+tracers.lists.scratch = {
+ 0, 2, 4, 6, 8
+}
-function tracers.split(csname)
- return lpegmatch(splitter,csname)
-end
+tracers.lists.internals = {
+ 'p:hsize', 'p:parindent', 'p:leftskip','p:rightskip',
+ 'p:vsize', 'p:parskip', 'p:baselineskip', 'p:lineskip', 'p:topskip'
+}
+
+tracers.lists.context = {
+ 'd:lineheight',
+ 'c:realpageno', 'c:pageno', 'c:subpageno'
+}
+
+local types = {
+ ['d'] = tracers.dimen,
+ ['c'] = tracers.count,
+ ['t'] = tracers.toks,
+ ['p'] = tracers.primitive
+}
+
+local splitboth = lpeg.splitat(":")
+local splittype = lpeg.firstofsplit(":")
+local splitname = lpeg.secondofsplit(":")
function tracers.type(csname)
- tag, name = tracers.split(csname)
- if tag then return tag else return nil end
+ return lpegmatch(splittype,csname)
end
function tracers.name(csname)
- tag, name = tracers.split(csname)
- if tag then return name else return csname end
+ return lpegmatch(splitname,csname) or csname
end
function tracers.cs(csname)
- tag, name = tracers.split(csname)
- if tracers.types[tag] then
- return tracers.types[tag](name)
+ local tag, name = lpegmatch(splitboth,csname)
+ if name and types[tag] then
+ return types[tag](name)
else
return tracers.primitive(csname)
end
end
function tracers.dimen(name)
- return (tex.dimen[name] and number.topoints(tex.dimen[name])) or tracers.strings.undefined
+ local d = texdimen[name]
+ return d and number.topoints(d) or tracers.strings.undefined
end
function tracers.count(name)
- return tex.count[name] or tracers.strings.undefined
+ return texcount[name] or tracers.strings.undefined
end
-function tracers.toks(name)
- return (tex.toks[name] and string.limit(tex.toks[name],40)) or tracers.strings.undefined
+function tracers.toks(name,limit)
+ local t = textoks[name]
+ return t and string.limit(t,tonumber(limit) or 40) or tracers.strings.undefined
end
function tracers.primitive(name)
return tex[name] or tracers.strings.undefined
end
-tracers.types = {
- ['d'] = tracers.dimen,
- ['c'] = tracers.count,
- ['t'] = tracers.toks,
- ['p'] = tracers.primitive
-}
-
function tracers.knownlist(name)
- return tracers.list[name] and #tracers.list[name] > 0
+ local l = tracers.lists[name]
+ return l and #l > 0
end
-function tracers.showdebuginfo()
- local variables = {
- ['title'] = 'ConTeXt Debug Information',
- ['color-background-one'] = lmx.get('color-background-green'),
- ['color-background-two'] = lmx.get('color-background-blue'),
- }
- lmx.show('context-debug.lmx',variables)
-end
-
-function tracers.showerror()
- local filename = status.filename
- local linenumber = tonumber(status.linenumber or "0")
- local variables = {
- ['title'] = 'ConTeXt Error Information',
- ['errormessage'] = status.lasterrorstring,
- ['linenumber'] = status.linenumber,
- ['color-background-one'] = lmx.get('color-background-yellow'),
- ['color-background-two'] = lmx.get('color-background-purple'),
- }
- if not filename then
- variables.filename, variables.errorcontext = 'unknown', 'error in filename'
- elseif type(filename) == "number" then
- variables.filename, variables.errorcontext = "<read " .. filename .. ">", 'unknown error'
- elseif io.exists(filename) then
- -- todo: use an input opener so that we also catch utf16 an reencoding
- lines = io.lines(filename)
- if lines then
- local context = { }
- n, m = 1, linenumber
- b, e = m-10, m+10
- s = string.len(tostring(e))
- for line in lines do
- if n > e then
- break
- elseif n > b then
- if n == m then
- context[#context+1] = string.format("%" .. s .. "d",n) .. " >> " .. line
- else
- context[#context+1] = string.format("%" .. s .. "d",n) .. " " .. line
- end
- end
- n = n + 1
- end
- variables.filename, variables.errorcontext = filename, table.concat(context,"\n")
+function tracers.showlines(filename,linenumber,offset)
+ local data = io.loaddata(filename)
+ local lines = data and string.splitlines(data)
+ if lines and #lines > 0 then
+ offset = tonumber(offset) or 10
+ linenumber = tonumber(linenumber) or 10
+ local start = math.max(linenumber - offset,1)
+ local stop = math.min(linenumber + offset,#lines)
+ if start > #lines then
+ return "<linenumber past end of file>"
else
- variables.filename, variables.errorcontext = filename, ""
+ local result, fmt = { }, "%" .. #tostring(stop) .. "d %s %s"
+ for n=start,stop do
+ result[#result+1] = format(fmt,n,n == linenumber and ">>" or " ",lines[n])
+ end
+ return concat(result,"\n")
end
else
- variables.filename, variables.errorcontext = filename, 'file not found'
+ return "<empty file>"
end
- lmx.show('context-error.lmx',variables)
end
-function tracers.overloaderror()
- callback.register('show_error_hook', tracers.showerror)
+function tracers.printerror(offset)
+ local filename, linenumber = status.filename, tonumber(status.linenumber) or 0
+ if not filename then
+ report_system("error not related to input file: %s ...",status.lasterrorstring)
+ elseif type(filename) == "number" then
+ report_system("error on line %s of filehandle %s: %s ...",linenumber,filename,status.lasterrorstring)
+ else
+ -- currently we still get the error message printed to the log/console so we
+ -- add a bit of spacing around our variant
+ texio.write_nl("\n")
+ report_system("error on line %s in file %s: %s ...\n",linenumber,filename,status.lasterrorstring or "?") -- lua error?
+ texio.write_nl(tracers.showlines(filename,linenumber,offset),"\n")
+ end
end
-tracers.list['scratch'] = {
- 0, 2, 4, 6, 8
-}
-
-tracers.list['internals'] = {
- 'p:hsize', 'p:parindent', 'p:leftskip','p:rightskip',
- 'p:vsize', 'p:parskip', 'p:baselineskip', 'p:lineskip', 'p:topskip'
-}
+directives.register("system.errorcontext", function(v)
+ if v then
+ callback.register('show_error_hook', function() tracers.printerror(v) end)
+ else
+ callback.register('show_error_hook', nil)
+ end
+end)
-tracers.list['context'] = {
- 'd:lineheight',
- 'c:realpageno', 'c:pageno', 'c:subpageno'
-}
+-- this might move
--- dumping the hash
+local lmx = namespaces.private("lmx")
--- \starttext
--- \ctxlua{tracers.dump_hash()}
--- \stoptext
+if not lmx.variables then lmx.variables = { } end
-local saved = { }
+lmx.htmfile = function(name) return environment.jobname .. "-status.html" end
+lmx.lmxfile = function(name) return resolvers.find_file(name,'tex') end
-function tracers.save_hash()
- saved = tex.hashtokens()
+function lmx.showdebuginfo(lmxname)
+ local variables = {
+ ['title'] = 'ConTeXt Debug Information',
+ ['color-background-one'] = lmx.get('color-background-green'),
+ ['color-background-two'] = lmx.get('color-background-blue'),
+ }
+ if lmxname == false then
+ return variables
+ else
+ lmx.show(lmxname or 'context-debug.lmx',variables)
+ end
end
-function tracers.dump_hash(filename,delta)
- filename = filename or tex.jobname .. "-hash.log"
- local list = { }
- local hash = tex.hashtokens()
- local command_name = token.command_name
- for name, token in next, hash do
- if not delta or not saved[name] then
- -- token: cmd, chr, csid -- combination cmd,chr determines name
- local kind = command_name(token)
- local dk = list[kind]
- if not dk then
- -- a bit funny names but this sorts better (easier to study)
- dk = { names = { }, found = 0, code = token[1] }
- list[kind] = dk
- end
- dk.names[name] = { token[2], token[3] }
- dk.found = dk.found + 1
- end
+function lmx.showerror(lmxname)
+ local filename, linenumber, errorcontext = status.filename, tonumber(status.linenumber) or 0, ""
+ if not filename then
+ filename, errorcontext = 'unknown', 'error in filename'
+ elseif type(filename) == "number" then
+ filename, errorcontext = format("<read %s>",filename), 'unknown error'
+ else
+ errorcontext = tracers.showlines(filename,linenumber,offset)
+ end
+ local variables = {
+ ['title'] = 'ConTeXt Error Information',
+ ['errormessage'] = status.lasterrorstring,
+ ['linenumber'] = linenumber,
+ ['color-background-one'] = lmx.get('color-background-yellow'),
+ ['color-background-two'] = lmx.get('color-background-purple'),
+ ['filename'] = filename,
+ ['errorcontext'] = errorcontext,
+ }
+ if lmxname == false then
+ return variables
+ else
+ lmx.show(lmxname or 'context-error.lmx',variables)
end
- io.savedata(filename,table.serialize(list,true))
end
-function tracers.register_dump_hash(delta)
- if delta then
- tracers.save_hash()
- end
- main.register_stop_actions(1,function() tracers.dump_hash(nil,true) end) -- at front
+function lmx.overloaderror()
+ callback.register('show_error_hook', function() lmx.showerror() end) -- prevents arguments being passed
end
-directives.register("system.dumphash", function() tracers.register_dump_hash(false) end)
-directives.register("system.dumpdelta", function() tracers.register_dump_hash(true ) end)
+directives.register("system.showerror", lmx.overloaderror)
diff --git a/tex/context/base/trac-deb.mkiv b/tex/context/base/trac-deb.mkiv
index b004cdeb4..e88a8a3f2 100644
--- a/tex/context/base/trac-deb.mkiv
+++ b/tex/context/base/trac-deb.mkiv
@@ -13,30 +13,11 @@
\writestatus{loading}{ConTeXt Tracing Macros / Debugger}
+\registerctxluafile{trac-lmx}{1.001}
\registerctxluafile{trac-deb}{1.001}
-\def\showdebuginfo{\ctxlua{tracers.showdebuginfo()}}
-\def\overloaderror{\ctxlua{tracers.overloaderror()}}
-
\def\breakpoint{\showdebuginfo\wait}
-\appendtoks
- \ctxlua {
- if debugger.tracing() then
- debugger.enable() ;
- end
- }%
-\to \everyjob
-
-\appendtoks
- \ctxlua {
- if debugger.tracing() then
- debugger.disable() ;
- debugger.savestats("\jobname-luacalls.log") ;
- end
- }%
-\to \everybye
-
\def\showtrackers {\ctxlua{trackers.show()}}
\def\enabletrackers [#1]{\ctxlua{trackers.enable("#1")}}
\def\disabletrackers [#1]{\ctxlua{trackers.disable("#1")}}
@@ -49,3 +30,6 @@
\def\showexperiments {\ctxlua{experiments.show()}}
\def\enableexperiments [#1]{\ctxlua{experiments.enable("#1")}}
\def\disableexperiments[#1]{\ctxlua{experiments.disable("#1")}}
+
+\def\showdebuginfo{\ctxlua{lmx.showdebuginfo()}}
+\def\overloaderror{\ctxlua{lmx.overloaderror()}} % \enabledirectives[system.showerror]
diff --git a/tex/context/base/trac-inf.lua b/tex/context/base/trac-inf.lua
index 72f03675a..4e9280d50 100644
--- a/tex/context/base/trac-inf.lua
+++ b/tex/context/base/trac-inf.lua
@@ -6,7 +6,13 @@ if not modules then modules = { } end modules ['trac-inf'] = {
license = "see context related readme files"
}
+-- As we want to protect the global tables, we no longer store the timing
+-- in the tables themselves but in a hidden timers table so that we don't
+-- get warnings about assignments. This is more efficient than using rawset
+-- and rawget.
+
local format = string.format
+local clock = os.gettimeofday or os.clock -- should go in environment
local statusinfo, n, registered = { }, 0, { }
@@ -15,91 +21,81 @@ statistics = statistics or { }
statistics.enable = true
statistics.threshold = 0.05
--- timing functions
-
-local clock = os.gettimeofday or os.clock
-
-local notimer
+local timers = { }
-function statistics.hastimer(instance)
- return instance and instance.starttime
+local function hastiming(instance)
+ return instance and timers[instance]
end
-function statistics.resettiming(instance)
- if not instance then
- notimer = { timing = 0, loadtime = 0 }
- else
- instance.timing, instance.loadtime = 0, 0
- end
+local function resettiming(instance)
+ timers[instance or "notimer"] = { timing = 0, loadtime = 0 }
end
-function statistics.starttiming(instance)
- if not instance then
- notimer = { }
- instance = notimer
+local function starttiming(instance)
+ local timer = timers[instance or "notimer"]
+ if not timer then
+ timer = { }
+ timers[instance or "notimer"] = timer
end
- local it = instance.timing
+ local it = timer.timing
if not it then
it = 0
end
if it == 0 then
- instance.starttime = clock()
- if not instance.loadtime then
- instance.loadtime = 0
+ timer.starttime = clock()
+ if not timer.loadtime then
+ timer.loadtime = 0
end
- else
---~ logs.report("system","nested timing (%s)",tostring(instance))
end
- instance.timing = it + 1
+ timer.timing = it + 1
end
-function statistics.stoptiming(instance, report)
- if not instance then
- instance = notimer
- end
- if instance then
- local it = instance.timing
- if it > 1 then
- instance.timing = it - 1
- else
- local starttime = instance.starttime
- if starttime then
- local stoptime = clock()
- local loadtime = stoptime - starttime
- instance.stoptime = stoptime
- instance.loadtime = instance.loadtime + loadtime
- if report then
- statistics.report("load time %0.3f",loadtime)
- end
- instance.timing = 0
- return loadtime
+local function stoptiming(instance, report)
+ local timer = timers[instance or "notimer"]
+ local it = timer.timing
+ if it > 1 then
+ timer.timing = it - 1
+ else
+ local starttime = timer.starttime
+ if starttime then
+ local stoptime = clock()
+ local loadtime = stoptime - starttime
+ timer.stoptime = stoptime
+ timer.loadtime = timer.loadtime + loadtime
+ if report then
+ statistics.report("load time %0.3f",loadtime)
end
+ timer.timing = 0
+ return loadtime
end
end
return 0
end
-function statistics.elapsedtime(instance)
- if not instance then
- instance = notimer
- end
- return format("%0.3f",(instance and instance.loadtime) or 0)
+local function elapsedtime(instance)
+ local timer = timers[instance or "notimer"]
+ return format("%0.3f",timer and timer.loadtime or 0)
end
-function statistics.elapsedindeed(instance)
- if not instance then
- instance = notimer
- end
- local t = (instance and instance.loadtime) or 0
- return t > statistics.threshold
+local function elapsedindeed(instance)
+ local timer = timers[instance or "notimer"]
+ return (timer and timer.loadtime or 0) > statistics.threshold
end
-function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds
- if statistics.elapsedindeed(instance) then
- return format("%s seconds %s", statistics.elapsedtime(instance),rest or "")
+local function elapsedseconds(instance,rest) -- returns nil if 0 seconds
+ if elapsedindeed(instance) then
+ return format("%s seconds %s", elapsedtime(instance),rest or "")
end
end
+statistics.hastiming = hastiming
+statistics.resettiming = resettiming
+statistics.starttiming = starttiming
+statistics.stoptiming = stoptiming
+statistics.elapsedtime = elapsedtime
+statistics.elapsedindeed = elapsedindeed
+statistics.elapsedseconds = elapsedseconds
+
-- general function
function statistics.register(tag,fnc)
@@ -128,7 +124,6 @@ function statistics.show(reporter)
end)
register("current memory usage", statistics.memused)
register("runtime",statistics.runtime)
--- --
for i=1,#statusinfo do
local s = statusinfo[i]
local r = s[2]()
@@ -142,7 +137,13 @@ function statistics.show(reporter)
end
function statistics.show_job_stat(tag,data,n)
- texio.write_nl(format("%-15s: %s - %s","mkiv lua stats",tag:rpadd(n," "),data))
+ if type(data) == "table" then
+ for i=1,#data do
+ statistics.show_job_stat(tag,data[i],n)
+ end
+ else
+ texio.write_nl(format("%-15s: %s - %s","mkiv lua stats",tag:rpadd(n," "),data))
+ end
end
function statistics.memused() -- no math.round yet -)
@@ -150,48 +151,35 @@ function statistics.memused() -- no math.round yet -)
return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
end
-if statistics.runtime then
- -- already loaded and set
-elseif luatex and luatex.starttime then
- statistics.starttime = luatex.starttime
- statistics.loadtime = 0
- statistics.timing = 0
-else
- statistics.starttiming(statistics)
-end
+starttiming(statistics)
-function statistics.runtime()
- statistics.stoptiming(statistics)
- return statistics.formatruntime(statistics.elapsedtime(statistics))
+function statistics.formatruntime(runtime) -- indirect so it can be overloaded and
+ return format("%s seconds", runtime) -- indeed that happens in cure-uti.lua
end
-function statistics.formatruntime(runtime)
- return format("%s seconds", statistics.elapsedtime(statistics))
+function statistics.runtime()
+ stoptiming(statistics)
+ return statistics.formatruntime(elapsedtime(statistics))
end
function statistics.timed(action,report)
- local timer = { }
report = report or logs.simple
- statistics.starttiming(timer)
+ starttiming("run")
action()
- statistics.stoptiming(timer)
- report("total runtime: %s",statistics.elapsedtime(timer))
+ stoptiming("run")
+ report("total runtime: %s",elapsedtime("run"))
end
-- where, not really the best spot for this:
commands = commands or { }
-local timer
-
-function commands.resettimer()
- statistics.resettiming(timer)
- statistics.starttiming(timer)
+function commands.resettimer(name)
+ resettiming(name or "whatever")
+ starttiming(name or "whatever")
end
-function commands.elapsedtime()
- statistics.stoptiming(timer)
- tex.sprint(statistics.elapsedtime(timer))
+function commands.elapsedtime(name)
+ stoptiming(name or "whatever")
+ tex.sprint(elapsedtime(name or "whatever"))
end
-
-commands.resettimer()
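The rewrite above keeps timing data in a hidden timers table keyed by the timed object instead of writing starttime/loadtime fields into that object, so protected namespaces no longer see stray assignments. A standalone sketch of the same idea (os.clock only; the real code prefers os.gettimeofday when available, and the statistics.* wrappers are left out):

-- standalone sketch of the hidden timers table (keyed by instance, nesting counted)
local clock  = os.clock
local timers = { }

local function starttiming(instance)
    local timer = timers[instance or "notimer"]
    if not timer then
        timer = { timing = 0, loadtime = 0 }
        timers[instance or "notimer"] = timer
    end
    if timer.timing == 0 then
        timer.starttime = clock()
    end
    timer.timing = timer.timing + 1
end

local function stoptiming(instance)
    local timer = timers[instance or "notimer"]
    if timer.timing > 1 then
        timer.timing = timer.timing - 1 -- still inside a nested start/stop pair
        return 0
    end
    local loadtime = clock() - timer.starttime
    timer.loadtime = timer.loadtime + loadtime
    timer.timing = 0
    return loadtime
end

local function elapsedtime(instance)
    local timer = timers[instance or "notimer"]
    return string.format("%0.3f",timer and timer.loadtime or 0)
end

starttiming("run")
for i=1,1e6 do end -- something to measure
stoptiming("run")
print(elapsedtime("run"))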
diff --git a/tex/context/base/trac-lmx.lua b/tex/context/base/trac-lmx.lua
index 664815c66..74e711ea4 100644
--- a/tex/context/base/trac-lmx.lua
+++ b/tex/context/base/trac-lmx.lua
@@ -10,7 +10,9 @@ if not modules then modules = { } end modules ['trac-lmx'] = {
local gsub, format, concat, byte = string.gsub, string.format, table.concat, string.byte
-lmx = lmx or { }
+local lmx = namespaces.private("lmx")
+
+lmx.variables = lmx.variables or { } -- global, shared
local escapes = {
['&'] = '&amp;',
@@ -21,8 +23,6 @@ local escapes = {
-- variables
-lmx.variables = { } -- global, shared
-
local lmxvariables = lmx.variables
lmxvariables['title-default'] = 'ConTeXt LMX File'
@@ -67,7 +67,7 @@ local function do_urlescaped(str)
return (gsub(str,"[^%a%d]",format("%%0x",byte("%1"))))
end
-function do_type(str)
+local function do_type(str)
if str then do_print("<tt>" .. do_escape(str) .. "</tt>") end
end
diff --git a/tex/context/base/trac-log.lua b/tex/context/base/trac-log.lua
index 0d4a1b0a9..ae1130e65 100644
--- a/tex/context/base/trac-log.lua
+++ b/tex/context/base/trac-log.lua
@@ -6,20 +6,17 @@ if not modules then modules = { } end modules ['trac-log'] = {
license = "see context related readme files"
}
--- this is old code that needs an overhaul
+-- xml logging is only useful in normal runs, not in ini mode
+-- it looks like some tex logging (like filenames) is broken (no longer
+-- intercepted at the tex end, so the xml variant is not that usable now)
--~ io.stdout:setvbuf("no")
--~ io.stderr:setvbuf("no")
-local write_nl, write = texio.write_nl or print, texio.write or io.write
+local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
local format, gmatch = string.format, string.gmatch
local texcount = tex and tex.count
-if texlua then
- write_nl = print
- write = io.write
-end
-
--[[ldx--
<p>This is a prelude to a more extensive logging module. For the sake
of parsing log files, in addition to the standard logging we will
@@ -27,65 +24,93 @@ provide an <l n='xml'/> structured file. Actually, any logging that
is hooked into callbacks will be \XML\ by default.</p>
--ldx]]--
-logs = logs or { }
-logs.xml = logs.xml or { }
-logs.tex = logs.tex or { }
+logs = logs or { }
--[[ldx--
<p>This looks pretty ugly but we need to speed things up a bit.</p>
--ldx]]--
-logs.moreinfo = [[
-more information about ConTeXt and the tools that come with it can be found at:
+local moreinfo = [[
+More information about ConTeXt and the tools that come with it can be found at:
maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
wiki : http://contextgarden.net
]]
-logs.levels = {
- ['error'] = 1,
- ['warning'] = 2,
- ['info'] = 3,
- ['debug'] = 4,
-}
-
-logs.functions = {
- 'report', 'start', 'stop', 'push', 'pop', 'line', 'direct',
+local functions = {
+ 'report', 'status', 'start', 'stop', 'push', 'pop', 'line', 'direct',
'start_run', 'stop_run',
'start_page_number', 'stop_page_number',
'report_output_pages', 'report_output_log',
'report_tex_stat', 'report_job_stat',
'show_open', 'show_close', 'show_load',
+ 'dummy',
}
-logs.tracers = {
-}
+local method = "nop"
-logs.level = 0
-logs.mode = string.lower((os.getenv("MTX.LOG.MODE") or os.getenv("MTX_LOG_MODE") or "tex"))
+function logs.set_method(newmethod)
+ method = newmethod
+ -- a direct copy might be faster but let's try this for a while
+ setmetatable(logs, { __index = logs[method] })
+end
-function logs.set_level(level)
- logs.level = logs.levels[level] or level
+function logs.get_method()
+ return method
end
-function logs.set_method(method)
- for _, v in next, logs.functions do
- logs[v] = logs[method][v] or function() end
+-- installer
+
+local data = { }
+
+function logs.new(category)
+ local logger = data[category]
+ if not logger then
+ logger = function(...)
+ logs.report(category,...)
+ end
+ data[category] = logger
end
+ return logger
+end
+
+--~ local report = logs.new("fonts")
+
+
+-- nop logging (maybe use __call instead)
+
+local noplog = { } logs.nop = noplog setmetatable(logs, { __index = noplog })
+
+for i=1,#functions do
+ noplog[functions[i]] = function() end
end
-- tex logging
-function logs.tex.report(category,fmt,...) -- new
- if fmt then
- write_nl(category .. " | " .. format(fmt,...))
+local texlog = { } logs.tex = texlog setmetatable(texlog, { __index = noplog })
+
+function texlog.report(a,b,c,...)
+ if c then
+ write_nl(format("%-16s> %s\n",a,format(b,c,...)))
+ elseif b then
+ write_nl(format("%-16s> %s\n",a,b))
else
- write_nl(category .. " |")
+ write_nl(format("%-16s>\n",a))
end
end
-function logs.tex.line(fmt,...) -- new
+function texlog.status(a,b,c,...)
+ if c then
+ write_nl(format("%-16s: %s\n",a,format(b,c,...)))
+ elseif b then
+ write_nl(format("%-16s: %s\n",a,b)) -- b can have %'s
+ else
+ write_nl(format("%-16s:>\n",a))
+ end
+end
+
+function texlog.line(fmt,...) -- new
if fmt then
write_nl(format(fmt,...))
else
@@ -93,62 +118,58 @@ function logs.tex.line(fmt,...) -- new
end
end
---~ function logs.tex.start_page_number()
---~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
---~ if real > 0 then
---~ if user > 0 then
---~ if sub > 0 then
---~ write(format("[%s.%s.%s",real,user,sub))
---~ else
---~ write(format("[%s.%s",real,user))
---~ end
---~ else
---~ write(format("[%s",real))
---~ end
---~ else
---~ write("[-")
---~ end
---~ end
-
---~ function logs.tex.stop_page_number()
---~ write("]")
---~ end
-
local real, user, sub
-function logs.tex.start_page_number()
+function texlog.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
end
-function logs.tex.stop_page_number()
+local report_pages = logs.new("pages") -- not needed but saves checking when we grep for it
+
+function texlog.stop_page_number()
if real > 0 then
if user > 0 then
if sub > 0 then
- logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
else
- logs.report("pages", "flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, userpage %s",real,user)
end
else
- logs.report("pages", "flushing realpage %s",real)
+ report_pages("flushing realpage %s",real)
end
else
- logs.report("pages", "flushing page")
+ report_pages("flushing page")
end
io.flush()
end
-logs.tex.report_job_stat = statistics.show_job_stat
+texlog.report_job_stat = statistics and statistics.show_job_stat
-- xml logging
-function logs.xml.report(category,fmt,...) -- new
- if fmt then
- write_nl(format("<r category='%s'>%s</r>",category,format(fmt,...)))
+local xmllog = { } logs.xml = xmllog setmetatable(xmllog, { __index = noplog })
+
+function xmllog.report(category,fmt,s,...) -- new
+ if s then
+ write_nl(format("<r category='%s'>%s</r>",category,format(fmt,s,...)))
+ elseif fmt then
+ write_nl(format("<r category='%s'>%s</r>",category,fmt))
else
write_nl(format("<r category='%s'/>",category))
end
end
-function logs.xml.line(fmt,...) -- new
+
+function xmllog.status(category,fmt,s,...)
+ if s then
+ write_nl(format("<s category='%s'>%s</r>",category,format(fmt,s,...)))
+ elseif fmt then
+ write_nl(format("<s category='%s'>%s</r>",category,fmt))
+ else
+ write_nl(format("<s category='%s'/>",category))
+ end
+end
+
+function xmllog.line(fmt,...) -- new
if fmt then
write_nl(format("<r>%s</r>",format(fmt,...)))
else
@@ -156,64 +177,78 @@ function logs.xml.line(fmt,...) -- new
end
end
-function logs.xml.start() if logs.level > 0 then tw("<%s>" ) end end
-function logs.xml.stop () if logs.level > 0 then tw("</%s>") end end
-function logs.xml.push () if logs.level > 0 then tw("<!-- ") end end
-function logs.xml.pop () if logs.level > 0 then tw(" -->" ) end end
+function xmllog.start() write_nl("<%s>" ) end
+function xmllog.stop () write_nl("</%s>") end
+function xmllog.push () write_nl("<!-- ") end
+function xmllog.pop () write_nl(" -->" ) end
-function logs.xml.start_run()
+function xmllog.start_run()
write_nl("<?xml version='1.0' standalone='yes'?>")
write_nl("<job>") -- xmlns='www.pragma-ade.com/luatex/schemas/context-job.rng'
write_nl("")
end
-function logs.xml.stop_run()
+function xmllog.stop_run()
write_nl("</job>")
end
-function logs.xml.start_page_number()
+function xmllog.start_page_number()
write_nl(format("<p real='%s' page='%s' sub='%s'", texcount.realpageno, texcount.userpageno, texcount.subpageno))
end
-function logs.xml.stop_page_number()
+function xmllog.stop_page_number()
write("/>")
write_nl("")
end
-function logs.xml.report_output_pages(p,b)
+function xmllog.report_output_pages(p,b)
write_nl(format("<v k='pages' v='%s'/>", p))
write_nl(format("<v k='bytes' v='%s'/>", b))
write_nl("")
end
-function logs.xml.report_output_log()
+function xmllog.report_output_log()
+ -- nothing
end
-function logs.xml.report_tex_stat(k,v)
- texiowrite_nl("log","<v k='"..k.."'>"..tostring(v).."</v>")
+function xmllog.report_tex_stat(k,v)
+ write_nl("log","<v k='"..k.."'>"..tostring(v).."</v>")
end
-local level = 0
+local nesting = 0
-function logs.xml.show_open(name)
- level = level + 1
- texiowrite_nl(format("<f l='%s' n='%s'>",level,name))
+function xmllog.show_open(name)
+ nesting = nesting + 1
+ write_nl(format("<f l='%s' n='%s'>",nesting,name))
end
-function logs.xml.show_close(name)
- texiowrite("</f> ")
- level = level - 1
+function xmllog.show_close(name)
+ write("</f> ")
+ nesting = nesting - 1
end
-function logs.xml.show_load(name)
- texiowrite_nl(format("<f l='%s' n='%s'/>",level+1,name))
+function xmllog.show_load(name)
+ write_nl(format("<f l='%s' n='%s'/>",nesting+1,name))
end
---
+-- initialization
+
+if tex and (tex.jobname or tex.formatname) then
+ -- todo: this can be set in mtxrun ... or maybe we should just forget about this alternative format
+ if (os.getenv("mtx.directives.logmethod") or os.getenv("mtx_directives_logmethod")) == "xml" then
+ logs.set_method('xml')
+ else
+ logs.set_method('tex')
+ end
+else
+ logs.set_method('nop')
+end
+
+-- logging in runners -> these are actually the nop loggers
local name, banner = 'report', 'context'
-local function report(category,fmt,...)
+function noplog.report(category,fmt,...) -- todo: fmt,s
if fmt then
write_nl(format("%s | %s: %s",name,category,format(fmt,...)))
elseif category then
@@ -223,7 +258,9 @@ local function report(category,fmt,...)
end
end
-local function simple(fmt,...)
+noplog.status = noplog.report -- just to be sure, never used
+
+function noplog.simple(fmt,...) -- todo: fmt,s
if fmt then
write_nl(format("%s | %s",name,format(fmt,...)))
else
@@ -231,40 +268,18 @@ local function simple(fmt,...)
end
end
-function logs.setprogram(_name_,_banner_,_verbose_)
- name, banner = _name_, _banner_
- if _verbose_ then
- trackers.enable("resolvers.locating")
- end
- logs.set_method("tex")
- logs.report = report -- also used in libraries
- logs.simple = simple -- only used in scripts !
- if utils then
- utils.report = simple
- end
- logs.verbose = _verbose_
+if utils then
+ utils.report = function(...) logs.simple(...) end
end
-function logs.setverbose(what)
- if what then
- trackers.enable("resolvers.locating")
- else
- trackers.disable("resolvers.locating")
- end
- logs.verbose = what or false
+function logs.setprogram(newname,newbanner)
+ name, banner = newname, newbanner
end
-function logs.extendbanner(_banner_,_verbose_)
- banner = banner .. " | ".. _banner_
- if _verbose_ ~= nil then
- logs.setverbose(what)
- end
+function logs.extendbanner(newbanner)
+ banner = banner .. " | ".. newbanner
end
-logs.verbose = false
-logs.report = logs.tex.report
-logs.simple = logs.tex.report
-
function logs.reportlines(str) -- todo: <lines></lines>
for line in gmatch(str,"(.-)[\n\r]") do
logs.report(line)
@@ -275,7 +290,15 @@ function logs.reportline() -- for scripts too
logs.report()
end
-logs.simpleline = logs.reportline
+function logs.simpleline()
+ logs.report()
+end
+
+function logs.simplelines(str) -- todo: <lines></lines>
+ for line in gmatch(str,"(.-)[\n\r]") do
+ logs.simple(line)
+ end
+end
function logs.reportbanner() -- for scripts too
logs.report(banner)
@@ -285,21 +308,26 @@ function logs.help(message,option)
logs.reportbanner()
logs.reportline()
logs.reportlines(message)
- local moreinfo = logs.moreinfo or ""
- if moreinfo ~= "" and option ~= "nomoreinfo" then
+ if option ~= "nomoreinfo" then
logs.reportline()
logs.reportlines(moreinfo)
end
end
-logs.set_level('error')
-logs.set_method('tex')
+-- logging to a file
+
+--~ local syslogname = "oeps.xxx"
+--~
+--~ for i=1,10 do
+--~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
+--~ end
function logs.system(whereto,process,jobname,category,...)
+ local message = format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%M:%S"),process,jobname,category,format(...))
for i=1,10 do
local f = io.open(whereto,"a")
if f then
- f:write(format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...)))
+ f:write(message)
f:close()
break
else
@@ -308,13 +336,32 @@ function logs.system(whereto,process,jobname,category,...)
end
end
---~ local syslogname = "oeps.xxx"
---~
---~ for i=1,10 do
---~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
---~ end
+-- bonus
function logs.fatal(where,...)
logs.report(where,"fatal error: %s, aborting now",format(...))
os.exit()
end
+
+--~ the traditional tex page number logging
+--~
+--~ function logs.tex.start_page_number()
+--~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+--~ if real > 0 then
+--~ if user > 0 then
+--~ if sub > 0 then
+--~ write(format("[%s.%s.%s",real,user,sub))
+--~ else
+--~ write(format("[%s.%s",real,user))
+--~ end
+--~ else
+--~ write(format("[%s",real))
+--~ end
+--~ else
+--~ write("[-")
+--~ end
+--~ end
+--~
+--~ function logs.tex.stop_page_number()
+--~ write("]")
+--~ end
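Two ideas carry this trac-log rewrite: logs.new hands out one cached closure per category, and logs.set_method switches the whole backend by swapping the __index metatable of logs. A standalone sketch of both, with simplified signatures and print instead of texio writes (illustrative only, not the real module):

-- standalone sketch of per-category reporters plus a switchable backend
local format = string.format

local logs = { }

local texlog = {
    report = function(a,b,...) print(format("%-16s> %s",a,format(b,...))) end,
    status = function(a,b,...) print(format("%-16s: %s",a,format(b,...))) end,
}

local xmllog = {
    report = function(a,b,...) print(format("<r category='%s'>%s</r>",a,format(b,...))) end,
    status = function(a,b,...) print(format("<s category='%s'>%s</s>",a,format(b,...))) end,
}

local methods = { tex = texlog, xml = xmllog }

function logs.set_method(name)
    setmetatable(logs,{ __index = methods[name] }) -- report/status resolve via the metatable
end

local loggers = { }

function logs.new(category) -- one closure per category, cached
    local logger = loggers[category]
    if not logger then
        logger = function(...) logs.report(category,...) end
        loggers[category] = logger
    end
    return logger
end

logs.set_method("tex")
local report_pages = logs.new("pages")
report_pages("flushing realpage %s",1) -- pages           > flushing realpage 1
logs.set_method("xml")
report_pages("flushing realpage %s",2) -- <r category='pages'>flushing realpage 2</r>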
diff --git a/tex/context/base/trac-pro.lua b/tex/context/base/trac-pro.lua
new file mode 100644
index 000000000..bfcd71138
--- /dev/null
+++ b/tex/context/base/trac-pro.lua
@@ -0,0 +1,207 @@
+if not modules then modules = { } end modules ['trac-pro'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type
+
+-- The protection implemented here is probably not that tight but good enough to catch
+-- problems due to naive usage.
+--
+-- There's a more extensive version (trac-xxx.lua) that supports nesting.
+--
+-- This will change when we have _ENV in lua 5.2+
+
+local trace_namespaces = false trackers.register("system.namespaces", function(v) trace_namespaces = v end)
+
+local report_system = logs.new("system")
+
+namespaces = { }
+
+local registered = { }
+
+local function report_index(k,name)
+ if trace_namespaces then
+ report_system("reference to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
+ else
+ report_system("reference to '%s' in protected namespace '%s'",k,name)
+ end
+end
+
+local function report_newindex(k,name)
+ if trace_namespaces then
+ report_system("assignment to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
+ else
+ report_system("assignment to '%s' in protected namespace '%s'",k,name)
+ end
+end
+
+local function register(name)
+ local data = name == "global" and _G or _G[name]
+ if not data then
+ return -- error
+ end
+ registered[name] = data
+ local m = getmetatable(data)
+ if not m then
+ m = { }
+ setmetatable(data,m)
+ end
+ local index, newindex = { }, { }
+ m.__saved__index = m.__index
+ m.__no__index = function(t,k)
+ if not index[k] then
+ index[k] = true
+ report_index(k,name)
+ end
+ return nil
+ end
+ m.__saved__newindex = m.__newindex
+ m.__no__newindex = function(t,k,v)
+ if not newindex[k] then
+ newindex[k] = true
+ report_newindex(k,name)
+ end
+ rawset(t,k,v)
+ end
+ m.__protection__depth = 0
+end
+
+local function private(name) -- maybe save name
+ local data = registered[name]
+ if not data then
+ data = _G[name]
+ if not data then
+ data = { }
+ _G[name] = data
+ end
+ register(name)
+ end
+ return data
+end
+
+local function protect(name)
+ local data = registered[name]
+ if not data then
+ return
+ end
+ local m = getmetatable(data)
+ local pd = m.__protection__depth
+ if pd > 0 then
+ m.__protection__depth = pd + 1
+ else
+ m.__saved__index, m.__saved__newindex = m.__index, m.__newindex
+ m.__index, m.__newindex = m.__no__index, m.__no__newindex
+ m.__protection__depth = 1
+ end
+end
+
+local function unprotect(name)
+ local data = registered[name]
+ if not data then
+ return
+ end
+ local m = getmetatable(data)
+ local pd = m.__protection__depth
+ if pd > 1 then
+ m.__protection__depth = pd - 1
+ else
+ m.__index, m.__newindex = m.__saved__index, m.__saved__newindex
+ m.__protection__depth = 0
+ end
+end
+
+local function protectall()
+ for name, _ in next, registered do
+ if name ~= "global" then
+ protect(name)
+ end
+ end
+end
+
+local function unprotectall()
+ for name, _ in next, registered do
+ if name ~= "global" then
+ unprotect(name)
+ end
+ end
+end
+
+namespaces.register = register -- register when defined
+namespaces.private = private -- allocate and register if needed
+namespaces.protect = protect
+namespaces.unprotect = unprotect
+namespaces.protectall = protectall
+namespaces.unprotectall = unprotectall
+
+namespaces.private("namespaces") registered = { } register("global") -- unreachable
+
+directives.register("system.protect", function(v)
+ if v then
+ protectall()
+ else
+ unprotectall()
+ end
+end)
+
+directives.register("system.checkglobals", function(v)
+ if v then
+ report_system("enabling global namespace guard")
+ protect("global")
+ else
+ report_system("disabling global namespace guard")
+ unprotect("global")
+ end
+end)
+
+-- dummy section (will go to luat-dum.lua)
+
+--~ if not namespaces.private then
+--~ -- somewhat protected
+--~ local registered = { }
+--~ function namespaces.private(name)
+--~ local data = registered[name]
+--~ if data then
+--~ return data
+--~ end
+--~ local data = _G[name]
+--~ if not data then
+--~ data = { }
+--~ _G[name] = data
+--~ end
+--~ registered[name] = data
+--~ return data
+--~ end
+--~ function namespaces.protectall(list)
+--~ for name, data in next, list or registered do
+--~ setmetatable(data, { __newindex = function() print(string.format("table %s is protected",name)) end })
+--~ end
+--~ end
+--~ namespaces.protectall { namespaces = namespaces }
+--~ end
+
+--~ directives.enable("system.checkglobals")
+
+--~ namespaces.register("resolvers","trackers")
+--~ namespaces.protect("resolvers")
+--~ namespaces.protect("resolvers")
+--~ namespaces.protect("resolvers")
+--~ namespaces.unprotect("resolvers")
+--~ namespaces.unprotect("resolvers")
+--~ namespaces.unprotect("resolvers")
+--~ namespaces.protect("trackers")
+
+--~ resolvers.x = true
+--~ resolvers.y = true
+--~ trackers.a = ""
+--~ resolvers.z = true
+--~ oeps = { }
+
+--~ resolvers = namespaces.private("resolvers")
+--~ fonts = namespaces.private("fonts")
+--~ directives.enable("system.protect")
+--~ namespaces.protectall()
+--~ resolvers.xx = { }
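The new trac-pro module guards namespaces with metatables: unknown reads and naive assignments are reported (once per key) while still being allowed. A standalone sketch of that guard, without the register/protect/unprotect bookkeeping and depth counting of the real module:

-- standalone sketch of a metatable-based namespace guard (warn once per key)
local function guard(t,name)
    local seen_index, seen_newindex = { }, { }
    setmetatable(t, {
        __index = function(_,k)
            if not seen_index[k] then
                seen_index[k] = true
                print(string.format("reference to '%s' in protected namespace '%s'",k,name))
            end
            return nil
        end,
        __newindex = function(tt,k,v)
            if not seen_newindex[k] then
                seen_newindex[k] = true
                print(string.format("assignment to '%s' in protected namespace '%s'",k,name))
            end
            rawset(tt,k,v) -- still allow it, we only report
        end,
    })
    return t
end

local resolvers = guard({ },"resolvers")
local x = resolvers.unknown   -- reported once
local y = resolvers.unknown   -- silent, already seen
resolvers.newfield = true     -- reported, but the assignment still happens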
diff --git a/tex/context/base/trac-set.lua b/tex/context/base/trac-set.lua
new file mode 100644
index 000000000..a9c55f954
--- /dev/null
+++ b/tex/context/base/trac-set.lua
@@ -0,0 +1,254 @@
+if not modules then modules = { } end modules ['trac-set'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, next, tostring = type, next, tostring
+local concat = table.concat
+local format, find, lower, gsub = string.format, string.find, string.lower, string.gsub
+local is_boolean = string.is_boolean
+
+setters = { }
+
+local data = { } -- maybe just local
+
+-- We can initialize from the cnf file. This is sort of tricky as
+-- later defined setters also need to be initialized then. If set
+-- this way, we need to ensure that they are not reset later on.
+
+local trace_initialize = false
+
+local function report(what,filename,name,key,value)
+ texio.write_nl(format("%s setter, filename: %s, name: %s, key: %s, value: %s",what,filename,name,key,value))
+end
+
+function setters.initialize(filename,name,values) -- filename only for diagnostics
+ local data = data[name]
+ if data then
+ data = data.data
+ if data then
+ for key, value in next, values do
+ key = gsub(key,"_",".")
+ value = is_boolean(value,value)
+ local functions = data[key]
+ if functions then
+ if #functions > 0 and not functions.value then
+ if trace_initialize then
+ report("doing",filename,name,key,value)
+ end
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value = value
+ else
+ if trace_initialize then
+ report("skipping",filename,name,key,value)
+ end
+ end
+ else
+ -- we do a simple preregistration i.e. not in the
+ -- list as it might be an obsolete entry
+ functions = { default = value }
+ data[key] = functions
+ if trace_initialize then
+ report("storing",filename,name,key,value)
+ end
+ end
+ end
+ end
+ end
+end
+
+-- user interface code
+
+local function set(t,what,newvalue)
+ local data, done = t.data, t.done
+ if type(what) == "string" then
+ what = aux.settings_to_hash(what) -- inefficient but ok
+ end
+ for w, value in next, what do
+ if value == "" then
+ value = newvalue
+ elseif not value then
+ value = false -- catch nil
+ else
+ value = is_boolean(value,value)
+ end
+ for name, functions in next, data do
+ if done[name] then
+ -- prevent recursion due to wildcards
+ elseif find(name,w) then
+ done[name] = true
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value = value
+ end
+ end
+ end
+end
+
+local function reset(t)
+ for name, functions in next, t.data do
+ for i=1,#functions do
+ functions[i](false)
+ end
+ functions.value = false
+ end
+end
+
+local function enable(t,what)
+ set(t,what,true)
+end
+
+local function disable(t,what)
+ local data = t.data
+ if not what or what == "" then
+ t.done = { }
+ reset(t)
+ else
+ set(t,what,false)
+ end
+end
+
+function setters.register(t,what,...)
+ local data = t.data
+ what = lower(what)
+ local functions = data[what]
+ if not functions then
+ functions = { }
+ data[what] = functions
+ end
+ local default = functions.default -- can be set from cnf file
+ for _, fnc in next, { ... } do
+ local typ = type(fnc)
+ if typ == "string" then
+ local s = fnc -- else wrong reference
+ fnc = function(value) set(t,s,value) end
+ elseif typ ~= "function" then
+ fnc = nil
+ end
+ if fnc then
+ functions[#functions+1] = fnc
+ if default then
+ fnc(default)
+ functions.value = default
+ end
+ end
+ end
+end
+
+function setters.enable(t,what)
+ local e = t.enable
+ t.enable, t.done = enable, { }
+ enable(t,string.simpleesc(tostring(what)))
+ t.enable, t.done = e, { }
+end
+
+function setters.disable(t,what)
+ local e = t.disable
+ t.disable, t.done = disable, { }
+ disable(t,string.simpleesc(tostring(what)))
+ t.disable, t.done = e, { }
+end
+
+function setters.reset(t)
+ t.done = { }
+ reset(t)
+end
+
+function setters.list(t) -- pattern
+ local list = table.sortedkeys(t.data)
+ local user, system = { }, { }
+ for l=1,#list do
+ local what = list[l]
+ if find(what,"^%*") then
+ system[#system+1] = what
+ else
+ user[#user+1] = what
+ end
+ end
+ return user, system
+end
+
+function setters.show(t)
+ commands.writestatus("","")
+ local list = setters.list(t)
+ local category = t.name
+ for k=1,#list do
+ local name = list[k]
+ local functions = t.data[name]
+ if functions then
+ local value, default, modules = functions.value, functions.default, #functions
+ value = value == nil and "unset" or tostring(value)
+ default = default == nil and "unset" or tostring(default)
+ commands.writestatus(category,format("%-25s modules: %2i default: %5s value: %5s",name,modules,default,value))
+ end
+ end
+ commands.writestatus("","")
+end
+
+-- we could have used a bit of oo and the trackers:enable syntax but
+-- there is already a lot of code around using the singular tracker
+
+-- we could make this into a module
+
+function setters.new(name)
+ local t
+ t = {
+ data = { }, -- indexed, but also default and value fields
+ name = name,
+ enable = function(...) setters.enable (t,...) end,
+ disable = function(...) setters.disable (t,...) end,
+ register = function(...) setters.register(t,...) end,
+ list = function(...) setters.list (t,...) end,
+ show = function(...) setters.show (t,...) end,
+ }
+ data[name] = t
+ return t
+end
+
+trackers = setters.new("trackers")
+directives = setters.new("directives")
+experiments = setters.new("experiments")
+
+-- nice trick: we overload two of the directives related functions with variants that
+-- do tracing (itself using a tracker) .. proof of concept
+
+local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end)
+local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end)
+
+local e = directives.enable
+local d = directives.disable
+
+function directives.enable(...)
+ (commands.writestatus or logs.report)("directives","enabling: %s",concat({...}," "))
+ e(...)
+end
+
+function directives.disable(...)
+ (commands.writestatus or logs.report)("directives","disabling: %s",concat({...}," "))
+ d(...)
+end
+
+local e = experiments.enable
+local d = experiments.disable
+
+function experiments.enable(...)
+ (commands.writestatus or logs.report)("experiments","enabling: %s",concat({...}," "))
+ e(...)
+end
+
+function experiments.disable(...)
+ (commands.writestatus or logs.report)("experiments","disabling: %s",concat({...}," "))
+ d(...)
+end
+
+-- a useful example
+
+directives.register("system.nostatistics", function(v)
+ statistics.enable = not v
+end)
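trackers, directives and experiments above are all instances of one setters object: callback functions registered per key and switched in bulk by a Lua pattern, which is what makes wildcard enabling such as "modules.*" work. A standalone sketch of that core, leaving out the cnf initialization and the done-table recursion guard of the real module (names and signatures simplified):

-- standalone sketch of a trackers/directives style setter (pattern based enable/disable)
local find, lower = string.find, string.lower

local function newsetter(name)
    local data = { } -- key -> list of callback functions
    local t = { name = name, data = data }
    function t.register(key,fnc)
        key = lower(key)
        local functions = data[key]
        if not functions then functions = { } data[key] = functions end
        functions[#functions+1] = fnc
    end
    local function set(pattern,value)
        for key, functions in pairs(data) do
            if find(key,pattern) then -- lua pattern, so "modules.*" style wildcards work
                for i=1,#functions do functions[i](value) end
                functions.value = value
            end
        end
    end
    function t.enable (pattern) set(pattern,true ) end
    function t.disable(pattern) set(pattern,false) end
    return t
end

local trackers = newsetter("trackers")

local trace_loading = false
trackers.register("modules.loading", function(v) trace_loading = v end)

trackers.enable("modules.*")  -- wildcard: switches on every "modules." tracker
print(trace_loading)          -- true
trackers.disable("modules.loading")
print(trace_loading)          -- false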
diff --git a/tex/context/base/trac-tex.lua b/tex/context/base/trac-tex.lua
new file mode 100644
index 000000000..914769c7f
--- /dev/null
+++ b/tex/context/base/trac-tex.lua
@@ -0,0 +1,47 @@
+if not modules then modules = { } end modules ['trac-tex'] = {
+ version = 1.001,
+ comment = "companion to trac-deb.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- moved from trac-deb.lua
+
+local saved = { }
+
+function trackers.save_hash()
+ saved = tex.hashtokens()
+end
+
+function trackers.dump_hash(filename,delta)
+ local list, hash, command_name = { }, tex.hashtokens(), token.command_name
+ for name, token in next, hash do
+ if not delta or not saved[name] then
+ -- token: cmd, chr, csid -- combination cmd,chr determines name
+ local kind = command_name(token)
+ local dk = list[kind]
+ if not dk then
+ -- a bit funny names but this sorts better (easier to study)
+ dk = { names = { }, found = 0, code = token[1] }
+ list[kind] = dk
+ end
+ dk.names[name] = { token[2], token[3] }
+ dk.found = dk.found + 1
+ end
+ end
+ io.savedata(filename or tex.jobname .. "-hash.log",table.serialize(list,true))
+end
+
+local delta = nil
+
+local function dump_hash(wanteddelta)
+ if delta == nil then
+ saved = saved or tex.hashtokens()
+ luatex.register_stop_actions(1,function() trackers.dump_hash(nil,wanteddelta) end) -- at front
+ end
+ delta = wanteddelta
+end
+
+directives.register("system.dumphash", function() dump_hash(false) end)
+directives.register("system.dumpdelta", function() dump_hash(true ) end)
diff --git a/tex/context/base/trac-tex.mkiv b/tex/context/base/trac-tex.mkiv
index 9c596feab..37eca9123 100644
--- a/tex/context/base/trac-tex.mkiv
+++ b/tex/context/base/trac-tex.mkiv
@@ -13,6 +13,8 @@
\writestatus{loading}{ConTeXt Tracking Macros / TeX}
+\registerctxluafile{trac-tex}{1.001}
+
%D All tracing flags at the \TEX\ end will be redone this way so
%D that we have a similar mechanism for \TEX\ and \LUA. Also, the
%D currently used if's might become conditionals.
@@ -33,23 +35,8 @@
\def\doenabletextracer #1{\csname enabletracer#1\endcsname}
\def\dodisabletextracer#1{\csname disabletracer#1\endcsname}
-% context --directives=system.nostatistics ...
-
-\def\nomkivstatistics{\ctxlua{statistics.enable = false}} % for taco
-
-\def\tracersdumphash {\ctxlua{tracers.register_dump_hash(false)}}
-\def\tracersdumpdelta{\ctxlua{tracers.register_dump_hash(true)}}
-
-% wrong place:
-
-\def\traceluausage
- {\dosingleempty\dotraceluausage}
+% The next one is for Taco, although we can use directives as well:
-\def\dotraceluausage[#1]%
- {\ctxlua{debugger.enable()}%
- \appendtoks
- \ctxlua{debugger.disable() debugger.showstats(print,\doifnumberelse{#1}{#1}{5000})}%
- \to \everybye
- \gdef\dotraceluausage[#1]{}}
+\def\nomkivstatistics{\enabledirectives[system.nostatistics]}
\protect \endinput
diff --git a/tex/context/base/trac-tim.lua b/tex/context/base/trac-tim.lua
index a8725bb5c..18d023982 100644
--- a/tex/context/base/trac-tim.lua
+++ b/tex/context/base/trac-tim.lua
@@ -10,10 +10,9 @@ local format, gsub = string.format, string.gsub
local concat, sort = table.concat, table.sort
local next, tonumber = next, tonumber
-plugins = plugins or { }
-plugins.progress = plugins.progress or { }
+moduledata.progress = moduledata.progress or { }
-local progress = plugins.progress
+local progress = moduledata.progress
progress = progress or { }
@@ -39,7 +38,7 @@ local params = {
local last = os.clock()
local data = { }
-function progress.save()
+function progress.save(name)
io.savedata((name or progress.defaultfilename) .. ".lut",table.serialize(data,true))
data = { }
end
@@ -129,8 +128,8 @@ local function convert(name)
sort(names)
processed[name] = {
names = names,
- top = top,
- bot = bot,
+ top = top,
+ bot = bot,
pages = pages,
paths = paths,
}
@@ -143,18 +142,23 @@ progress.convert = convert
function progress.bot(name,tag)
return convert(name).bot[tag] or 0
end
+
function progress.top(name,tag)
return convert(name).top[tag] or 0
end
+
function progress.pages(name,tag)
return convert(name).pages or 0
end
+
function progress.path(name,tag)
return convert(name).paths[tag] or "origin"
end
+
function progress.nodes(name)
return convert(name).names or { }
end
+
function progress.parameters(name)
return params -- shared
end
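The only functional change above is that progress.save now actually accepts the name it always referenced; a small sketch (the file name is illustrative):

    local progress = moduledata.progress

    progress.save("mytiming")   -- writes mytiming.lut and clears the collected data;
                                -- before this change 'name' was an undeclared global,
                                -- so the default file name was always used
    progress.save()             -- still falls back to progress.defaultfilename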
diff --git a/tex/context/base/trac-tra.lua b/tex/context/base/trac-tra.lua
index 052e4bba7..916b68045 100644
--- a/tex/context/base/trac-tra.lua
+++ b/tex/context/base/trac-tra.lua
@@ -14,8 +14,8 @@ local debug = require "debug"
local getinfo = debug.getinfo
local type, next = type, next
-local concat = table.concat
-local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub
+local format, find = string.format, string.find
+local is_boolean = string.is_boolean
debugger = debugger or { }
@@ -38,6 +38,7 @@ local function hook()
end
end
end
+
local function getname(func)
local n = names[func]
if n then
@@ -53,6 +54,7 @@ local function getname(func)
return "unknown"
end
end
+
function debugger.showstats(printer,threshold)
printer = printer or texio.write or print
threshold = threshold or 0
@@ -124,13 +126,17 @@ function debugger.disable()
--~ counters[debug.getinfo(2,"f").func] = nil
end
-function debugger.tracing()
- local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0
- if n > 0 then
- function debugger.tracing() return true end ; return true
- else
- function debugger.tracing() return false end ; return false
- end
+local function trace_calls(n)
+ debugger.enable()
+ luatex.register_stop_actions(function()
+ debugger.disable()
+ debugger.savestats(tex.jobname .. "-luacalls.log",tonumber(n))
+ end)
+ trace_calls = function() end
+end
+
+if directives then
+ directives.register("system.tracecalls", function(n) trace_calls(n) end) -- indirect is needed for nilling
end
--~ debugger.enable()
@@ -147,195 +153,3 @@ end
--~ debugger.showstats()
--~ print("")
--~ debugger.showstats(print,3)
-
-setters = setters or { }
-setters.data = setters.data or { }
-
---~ local function set(t,what,value)
---~ local data, done = t.data, t.done
---~ if type(what) == "string" then
---~ what = aux.settings_to_array(what) -- inefficient but ok
---~ end
---~ for i=1,#what do
---~ local w = what[i]
---~ for d, f in next, data do
---~ if done[d] then
---~ -- prevent recursion due to wildcards
---~ elseif find(d,w) then
---~ done[d] = true
---~ for i=1,#f do
---~ f[i](value)
---~ end
---~ end
---~ end
---~ end
---~ end
-
-local function set(t,what,value)
- local data, done = t.data, t.done
- if type(what) == "string" then
- what = aux.settings_to_hash(what) -- inefficient but ok
- end
- for w, v in next, what do
- if v == "" then
- v = value
- else
- v = toboolean(v)
- end
- for d, f in next, data do
- if done[d] then
- -- prevent recursion due to wildcards
- elseif find(d,w) then
- done[d] = true
- for i=1,#f do
- f[i](v)
- end
- end
- end
- end
-end
-
-local function reset(t)
- for d, f in next, t.data do
- for i=1,#f do
- f[i](false)
- end
- end
-end
-
-local function enable(t,what)
- set(t,what,true)
-end
-
-local function disable(t,what)
- local data = t.data
- if not what or what == "" then
- t.done = { }
- reset(t)
- else
- set(t,what,false)
- end
-end
-
-function setters.register(t,what,...)
- local data = t.data
- what = lower(what)
- local w = data[what]
- if not w then
- w = { }
- data[what] = w
- end
- for _, fnc in next, { ... } do
- local typ = type(fnc)
- if typ == "function" then
- w[#w+1] = fnc
- elseif typ == "string" then
- w[#w+1] = function(value) set(t,fnc,value,nesting) end
- end
- end
-end
-
-function setters.enable(t,what)
- local e = t.enable
- t.enable, t.done = enable, { }
- enable(t,string.simpleesc(tostring(what)))
- t.enable, t.done = e, { }
-end
-
-function setters.disable(t,what)
- local e = t.disable
- t.disable, t.done = disable, { }
- disable(t,string.simpleesc(tostring(what)))
- t.disable, t.done = e, { }
-end
-
-function setters.reset(t)
- t.done = { }
- reset(t)
-end
-
-function setters.list(t) -- pattern
- local list = table.sortedkeys(t.data)
- local user, system = { }, { }
- for l=1,#list do
- local what = list[l]
- if find(what,"^%*") then
- system[#system+1] = what
- else
- user[#user+1] = what
- end
- end
- return user, system
-end
-
-function setters.show(t)
- commands.writestatus("","")
- local list = setters.list(t)
- for k=1,#list do
- commands.writestatus(t.name,list[k])
- end
- commands.writestatus("","")
-end
-
--- we could have used a bit of oo and the trackers:enable syntax but
--- there is already a lot of code around using the singular tracker
-
--- we could make this into a module
-
-function setters.new(name)
- local t
- t = {
- data = { },
- name = name,
- enable = function(...) setters.enable (t,...) end,
- disable = function(...) setters.disable (t,...) end,
- register = function(...) setters.register(t,...) end,
- list = function(...) setters.list (t,...) end,
- show = function(...) setters.show (t,...) end,
- }
- setters.data[name] = t
- return t
-end
-
-trackers = setters.new("trackers")
-directives = setters.new("directives")
-experiments = setters.new("experiments")
-
--- nice trick: we overload two of the directives related functions with variants that
--- do tracing (itself using a tracker) .. proof of concept
-
-local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end)
-local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end)
-
-local e = directives.enable
-local d = directives.disable
-
-function directives.enable(...)
- commands.writestatus("directives","enabling: %s",concat({...}," "))
- e(...)
-end
-
-function directives.disable(...)
- commands.writestatus("directives","disabling: %s",concat({...}," "))
- d(...)
-end
-
-local e = experiments.enable
-local d = experiments.disable
-
-function experiments.enable(...)
- commands.writestatus("experiments","enabling: %s",concat({...}," "))
- e(...)
-end
-
-function experiments.disable(...)
- commands.writestatus("experiments","disabling: %s",concat({...}," "))
- d(...)
-end
-
--- a useful example
-
-directives.register("system.nostatistics", function(v)
- statistics.enable = not v
-end)
-
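The setters code removed here moves to its own module; what stays behind is the call-tracing directive. A sketch of what enabling it amounts to, per the trace_calls function above (the threshold argument is left out here):

    directives.enable("system.tracecalls")    -- or --directives=system.tracecalls on the command line
    -- which, through the registered function above, boils down to:
    debugger.enable()
    luatex.register_stop_actions(function()
        debugger.disable()
        debugger.savestats(tex.jobname .. "-luacalls.log")
    end)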
diff --git a/tex/context/base/type-dejavu.mkiv b/tex/context/base/type-dejavu.mkiv
new file mode 100644
index 000000000..116d3ca4c
--- /dev/null
+++ b/tex/context/base/type-dejavu.mkiv
@@ -0,0 +1,47 @@
+%D \module
+%D [ file=type-dejavu,
+%D version=2010.06.21,
+%D title=\CONTEXT\ Typescript Macros,
+%D subtitle=Dejavu fonts (dejavu-fonts.org),
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright=PRAGMA ADE, NL]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\starttypescriptcollection[dejavu]
+
+ \starttypescript [serif] [dejavu] [name]
+ \setups[font:fallback:serif]
+ \definefontsynonym [Serif] [name:dejavuserif] [features=default]
+ \definefontsynonym [SerifBold] [name:dejavuserifbold] [features=default]
+ \definefontsynonym [SerifItalic] [name:dejavuserifitalic] [features=default]
+ \definefontsynonym [SerifBoldItalic] [name:dejavuserifbolditalic] [features=default]
+ \stoptypescript
+
+ \starttypescript [sans] [dejavu] [name]
+ \setups[font:fallback:sans]
+ \definefontsynonym [Sans] [name:dejavusans] [features=default]
+ \definefontsynonym [SansBold] [name:dejavusansbold] [features=default]
+ \definefontsynonym [SansItalic] [name:dejavusansoblique] [features=default]
+ \definefontsynonym [SansBoldItalic] [name:dejavusansboldoblique] [features=default]
+ \stoptypescript
+
+ \starttypescript [mono] [dejavu] [name]
+ \setups[font:fallback:mono]
+ \definefontsynonym [Mono] [name:dejavusansmono] [features=default]
+ \definefontsynonym [MonoBold] [name:dejavusansmonobold] [features=default]
+ \definefontsynonym [MonoItalic] [name:dejavusansmonooblique] [features=default]
+ \definefontsynonym [MonoBoldItalic] [name:dejavusansmonoboldoblique] [features=default]
+ \stoptypescript
+
+ \starttypescript[dejavu]
+ \definetypeface [dejavu] [rm] [serif] [dejavu] [default]
+ \definetypeface [dejavu] [ss] [sans] [dejavu] [default]
+ \definetypeface [dejavu] [tt] [mono] [dejavu] [default]
+ \definetypeface [dejavu] [mm] [math] [xits] [default] [rscale=auto]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/base/type-one.mkii b/tex/context/base/type-one.mkii
index efe31ed21..14e97a9bb 100644
--- a/tex/context/base/type-one.mkii
+++ b/tex/context/base/type-one.mkii
@@ -1533,16 +1533,16 @@
\definefontsynonym[Iwona-Medium-Italic] [\typescriptthree-iwonami] [encoding=\typescriptthree]
\definefontsynonym[Iwona-Heavy-Regular] [\typescriptthree-iwonah] [encoding=\typescriptthree]
\definefontsynonym[Iwona-Heavy-Italic] [\typescriptthree-iwonahi] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsRegular] [\typescriptthree-iwonarcap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsItalic] [\typescriptthree-iwonaricap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsBold] [\typescriptthree-iwonabcap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsBoldItalic] [\typescriptthree-iwonabicap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsLight-Regular] [\typescriptthree-iwonalcap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsLight-Italic] [\typescriptthree-iwonalicap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsMedium-Regular] [\typescriptthree-iwonamcap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsMedium-Italic] [\typescriptthree-iwonamicap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsHeavy-Regular] [\typescriptthree-iwonahcap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsHeavy-Italic] [\typescriptthree-iwonahicap] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsRegular] [\typescriptthree-iwonar-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsItalic] [\typescriptthree-iwonari-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsBold] [\typescriptthree-iwonab-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsBoldItalic] [\typescriptthree-iwonabi-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsLight-Regular] [\typescriptthree-iwonal-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsLight-Italic] [\typescriptthree-iwonali-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsMedium-Regular] [\typescriptthree-iwonam-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsMedium-Italic] [\typescriptthree-iwonami-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsHeavy-Regular] [\typescriptthree-iwonah-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsHeavy-Italic] [\typescriptthree-iwonahi-sc] [encoding=\typescriptthree]
\definefontsynonym[Iwona-CondRegular] [\typescriptthree-iwonacr] [encoding=\typescriptthree]
\definefontsynonym[Iwona-CondItalic] [\typescriptthree-iwonacri] [encoding=\typescriptthree]
\definefontsynonym[Iwona-CondBold] [\typescriptthree-iwonacb] [encoding=\typescriptthree]
@@ -1553,16 +1553,16 @@
\definefontsynonym[Iwona-CondMedium-Italic] [\typescriptthree-iwonacmi] [encoding=\typescriptthree]
\definefontsynonym[Iwona-CondHeavy-Regular] [\typescriptthree-iwonach] [encoding=\typescriptthree]
\definefontsynonym[Iwona-CondHeavy-Italic] [\typescriptthree-iwonachi] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsCondRegular] [\typescriptthree-iwonacrcap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsCondItalic] [\typescriptthree-iwonacricap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsCondBold] [\typescriptthree-iwonacbcap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsCondBoldItalic] [\typescriptthree-iwonacbicap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsCondLight-Regular] [\typescriptthree-iwonaclcap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsCondLight-Italic] [\typescriptthree-iwonaclicap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsCondMedium-Regular][\typescriptthree-iwonacmcap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsCondMedium-Italic] [\typescriptthree-iwonacmicap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsCondHeavy-Regular] [\typescriptthree-iwonachcap] [encoding=\typescriptthree]
- \definefontsynonym[Iwona-CapsCondHeavy-Italic] [\typescriptthree-iwonachicap] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsCondRegular] [\typescriptthree-iwonacr-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsCondItalic] [\typescriptthree-iwonacri-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsCondBold] [\typescriptthree-iwonacb-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsCondBoldItalic] [\typescriptthree-iwonacbi-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsCondLight-Regular] [\typescriptthree-iwonacl-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsCondLight-Italic] [\typescriptthree-iwonacli-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsCondMedium-Regular][\typescriptthree-iwonacm-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsCondMedium-Italic] [\typescriptthree-iwonacmi-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsCondHeavy-Regular] [\typescriptthree-iwonach-sc] [encoding=\typescriptthree]
+ \definefontsynonym[Iwona-CapsCondHeavy-Italic] [\typescriptthree-iwonachi-sc] [encoding=\typescriptthree]
\loadmapfile[iwona-\typescriptthree.map]
\stoptypescript
diff --git a/tex/context/base/type-otf.mkiv b/tex/context/base/type-otf.mkiv
index 486fa1a57..178e32b4f 100644
--- a/tex/context/base/type-otf.mkiv
+++ b/tex/context/base/type-otf.mkiv
@@ -16,7 +16,6 @@
%D in good old \TEX, and these may differ a bit. Here we also see
%D some oldstyle definitions which normally are done with features.
-
% \starttypescriptcollection[myfonts]
%
% \starttypescript [serif] [myserif] [name]
@@ -1493,7 +1492,7 @@
\stoptypescript
\starttypescript [math] [asana] [name]
- \definefontsynonym [MathRoman] [AsanaMath] [\s!features=math\mathsizesuffix]
+ \definefontsynonym [MathRoman] [AsanaMath] [\s!features=\s!math\mathsizesuffix]
\stoptypescript
\starttypescript[asana]
@@ -1528,13 +1527,13 @@
\stoptypescript
\starttypescript [math] [cambria,cambria-m,cambria-a] [name]
- \definefontsynonym [MathRoman] [CambriaMath] [\s!features=math\mathsizesuffix]
+ \definefontsynonym [MathRoman] [CambriaMath] [\s!features=\s!math\mathsizesuffix]
\stoptypescript
\starttypescript [math] [cambria-x] [name]
- \definefontsynonym [MathRoman] [CambriaMath] [\s!features=math]
+ \definefontsynonym [MathRoman] [CambriaMath] [\s!features=\s!math]
\stoptypescript
\starttypescript [math] [cambria-y] [name]
- \definefontsynonym [MathRoman] [CambriaMath] [\s!features=math-nostack\mathsizesuffix]
+ \definefontsynonym [MathRoman] [CambriaMath] [\s!features=\s!math-nostack\mathsizesuffix]
\stoptypescript
\starttypescript [serif] [cambria,cambria-m,cambria-a] [name]
@@ -1647,7 +1646,7 @@
\starttypescriptcollection[liberation]
- \starttypescript [serif] [liberationserif] [name]
+ \starttypescript [serif] [liberation] [name]
\setups[\s!font:\s!fallback:\s!serif]
\definefontsynonym [\s!Serif] [\s!file:liberationserif-regular] [\s!features=\s!default]
\definefontsynonym [\s!SerifBold] [\s!file:liberationserif-bold] [\s!features=\s!default]
@@ -1655,7 +1654,7 @@
\definefontsynonym [\s!SerifBoldItalic] [\s!file:liberationserif-bolditalic] [\s!features=\s!default]
\stoptypescript
- \starttypescript [sans] [liberationsans] [name]
+ \starttypescript [sans] [liberation] [name]
\setups[\s!font:\s!fallback:\s!sans]
\definefontsynonym [\s!Sans] [\s!file:liberationsans-regular] [\s!features=\s!default]
\definefontsynonym [\s!SansBold] [\s!file:liberationsans-bold] [\s!features=\s!default]
@@ -1663,7 +1662,7 @@
\definefontsynonym [\s!SansBoldItalic] [\s!file:liberationsans-bolditalic] [\s!features=\s!default]
\stoptypescript
- \starttypescript [mono] [liberationmono] [name]
+ \starttypescript [mono] [liberation] [name]
\setups[\s!font:\s!fallback:\s!mono]
\definefontsynonym [\s!Mono] [\s!file:liberationmono-regular] [\s!features=\s!default]
\definefontsynonym [\s!MonoBold] [\s!file:liberationmono-bold] [\s!features=\s!default]
@@ -1672,10 +1671,10 @@
\stoptypescript
\starttypescript[liberation]
- \definetypeface [liberation] [rm] [serif] [liberationserif] [default]
- \definetypeface [liberation] [ss] [sans] [liberationsans] [default] [rscale=0.870]
- \definetypeface [liberation] [tt] [mono] [liberationmono] [default] [rscale=0.870]
- \definetypeface [liberation] [mm] [math] [times] [default] [rscale=1.040]
+ \definetypeface [liberation] [rm] [serif] [liberation] [default]
+ \definetypeface [liberation] [ss] [sans] [liberation] [default] [rscale=0.870]
+ \definetypeface [liberation] [tt] [mono] [liberation] [default] [rscale=0.870]
+ \definetypeface [liberation] [mm] [math] [times] [default] [rscale=1.040]
\stoptypescript
\stoptypescriptcollection
@@ -1762,8 +1761,8 @@
\stoptypescript
\starttypescript [math] [euler] [name]
- % \definefontsynonym [MathRoman] [EulerMath] [\s!features=math]
- \definefontsynonym [MathRoman] [EulerMath] [\s!features=math\mathsizesuffix]
+ % \definefontsynonym [MathRoman] [EulerMath] [\s!features=\s!math]
+ \definefontsynonym [MathRoman] [EulerMath] [\s!features=\s!math\mathsizesuffix]
\stoptypescript
\starttypescript [pagella-euler]
@@ -1788,6 +1787,59 @@
\stoptypescriptcollection
+\starttypescriptcollection[stix]
+
+ % This typescript is only provided to keep an eye on developments of this font
+ % but currently these are not proper opentype math fonts (for instance they have
+ % no math table yet). We will not make a virtual font for this as eventually
+ % there will be a decent version. Beware, we force an otf suffix as there happen
+ % to be ttf files as well. BTW, why 'italic' in full and 'bol' without 'd'?
+
+ \starttypescript [math] [stix] [name]
+ \definefontsynonym[MathRoman][\s!file:stixgeneral.otf] [\s!features=\s!math]
+ \stoptypescript
+
+ \starttypescript [serif] [stix] [name]
+ \setups[\s!font:\s!fallback:\s!serif]
+ \definefontsynonym[\s!Serif] [\s!file:stixgeneral.otf] [\s!features=\s!default]
+ \definefontsynonym[\s!SerifBold] [\s!file:stixgeneralbol.otf] [\s!features=\s!default]
+ \definefontsynonym[\s!SerifItalic] [\s!file:stixgeneralitalic.otf] [\s!features=\s!default]
+ \definefontsynonym[\s!SerifBoldItalic][\s!file:stixgeneralbolita.otf] [\s!features=\s!default]
+ \stoptypescript
+
+ \starttypescript[stix]
+ \definetypeface [stix] [rm] [\s!serif] [stix] [\s!default]
+ \definetypeface [stix] [mm] [\s!math] [stix] [\s!default]
+ \stoptypescript
+
+\stoptypescriptcollection
+
+\starttypescriptcollection[xits]
+
+ % This one makes more sense. Xits uses the glyph collection from stix but packages
+ % it in a proper OpenType Math font.
+
+ \starttypescript [math] [xits] [name]
+ \definefontsynonym[MathRoman][file:xits-math.otf][\s!features=\s!math\mathsizesuffix]
+ \stoptypescript
+
+ \starttypescript [serif] [xits] [name]
+ \setups[\s!font:\s!fallback:\s!serif]
+ \definefontsynonym[\s!Serif] [\s!file:xits-regular.otf] [\s!features=\s!default]
+ \definefontsynonym[\s!SerifBold] [\s!file:xits-bold.otf] [\s!features=\s!default]
+ \definefontsynonym[\s!SerifItalic] [\s!file:xits-italic.otf] [\s!features=\s!default]
+ \definefontsynonym[\s!SerifBoldItalic][\s!file:xits-bolditalic.otf] [\s!features=\s!default]
+ \stoptypescript
+
+ \starttypescript[xits]
+ \definetypeface [xits] [rm] [\s!serif] [xits] [\s!default]
+ \definetypeface [xits] [ss] [\s!sans] [heros] [\s!default] [\s!rscale=0.9]
+ \definetypeface [xits] [tt] [\s!mono] [modern] [\s!default] [\s!rscale=1.05]
+ \definetypeface [xits] [mm] [\s!math] [xits] [\s!default]
+ \stoptypescript
+
+\stoptypescriptcollection
+
% \starttypescript [math] [hvmath]
% \definefontsynonym[MathRoman][hvmath@hvmath-math]
% \loadfontgoodies[hvmath-math]
diff --git a/tex/context/base/typo-cap.lua b/tex/context/base/typo-cap.lua
index 5f741da7c..a29e45810 100644
--- a/tex/context/base/typo-cap.lua
+++ b/tex/context/base/typo-cap.lua
@@ -8,13 +8,16 @@ if not modules then modules = { } end modules ['typo-cap'] = {
local next, type = next, type
local format, insert = string.format, table.insert
+local div = math.div
-local trace_casing = false trackers.register("nodes.casing", function(v) trace_casing = v end)
+local trace_casing = false trackers.register("typesetting.casing", function(v) trace_casing = v end)
-local has_attribute = node.has_attribute
-local unset_attribute = node.unset_attribute
-local set_attribute = node.set_attribute
-local traverse_id = node.traverse_id
+local report_casing = logs.new("casing")
+
+local has_attribute = node.has_attribute
+local unset_attribute = node.unset_attribute
+local set_attribute = node.set_attribute
+local traverse_id = node.traverse_id
local glyph = node.id("glyph")
local kern = node.id("kern")
@@ -23,14 +26,28 @@ local fontdata = fonts.ids
local fontchar = fonts.chr
local chardata = characters.data
-cases = cases or { }
+typesetting = typesetting or { }
+typesetting.cases = typesetting.cases or { }
+
+local cases = typesetting.cases
+
cases.actions = { }
-cases.attribute = attributes.private("case")
+cases.attribute = attributes.private("case") -- no longer needed
+
+local a_cases = cases.attribute
local actions = cases.actions
local lastfont = nil
--- we use char0 as placeholder for the larger font
+-- we use char(0) as placeholder for the larger font, so we need to remove it
+-- before it can do further harm
+--
+-- we could do the whole glyph run here (till no more attributes match) but
+-- then we end up with more code .. maybe i will clean this up anyway as the
+-- lastfont hack is somewhat ugly .. on the other hand, we need to deal with
+-- cases like:
+--
+-- \WORD {far too \Word{many \WORD{more \word{pushed} in between} useless} words}
local function helper(start, code, codes, special, attribute, once)
local char = start.char
@@ -38,6 +55,7 @@ local function helper(start, code, codes, special, attribute, once)
if dc then
local fnt = start.font
if special then
+ -- will become function
if start.char == 0 then
lastfont = fnt
local prev, next = start.prev, start.next
@@ -51,7 +69,6 @@ local function helper(start, code, codes, special, attribute, once)
start.font = lastfont
end
end
- -- local ifc = fontdata[fnt].characters
local ifc = fontchar[fnt]
local ucs = dc[codes]
if ucs then
@@ -106,7 +123,7 @@ actions[2] = function(start,attribute)
return helper(start,'lccode','lccodes')
end
-actions[3] = function(start,attribute)
+actions[3] = function(start,attribute,attr)
lastfont = nil
local prev = start.prev
if prev and prev.id == kern and prev.subtype == 0 then
@@ -115,10 +132,14 @@ actions[3] = function(start,attribute)
if not prev or prev.id ~= glyph then
--- only the first character is treated
for n in traverse_id(glyph,start.next) do
- if has_attribute(n,attribute) then
+ if has_attribute(n,attribute) == attr then
unset_attribute(n,attribute)
+ else
+ -- break -- we can have nested mess
end
end
+ -- we could return the last in the range and save some scanning
+ -- but why bother
return helper(start,'uccode','uccodes')
else
return start, false
@@ -174,37 +195,67 @@ actions[8] = function(start)
end
end
end
- else
- return start, false
end
+ return start, false
end
-- node.traverse_id_attr
-function cases.process(namespace,attribute,head) -- not real fast but also not used on much data
+local function process(namespace,attribute,head) -- not real fast but also not used on much data
lastfont = nil
+ local lastattr = nil
local done = false
- for start in traverse_id(glyph,head) do
- local attr = has_attribute(start,attribute)
- if attr and attr > 0 then
- unset_attribute(start,attribute)
- local action = actions[attr]
- if action then
- local _, ok = action(start,attribute)
- done = done and ok
+ local start = head
+ while start do -- while because start can jump ahead
+ local id = start.id
+ if id == glyph then
+ local attr = has_attribute(start,attribute)
+ if attr and attr > 0 then
+ if attr ~= lastattr then
+ lastfont = nil
+ lastattr = attr
+ end
+ unset_attribute(start,attribute)
+ local action = actions[attr%100] -- map back to low number
+ if action then
+ start, ok = action(start,attribute,attr)
+ done = done or ok
+ if trace_casing then
+ report_casing("case trigger %s, instance %s, result %s",attr%100,div(attr,100),tostring(ok))
+ end
+ elseif trace_casing then
+ report_casing("unknown case trigger %s",attr)
+ end
end
end
+ if start then
+ start = start.next
+ end
end
lastfont = nil
return head, done
end
-chars.handle_casing = nodes.install_attribute_handler {
- name = "case",
- namespace = cases,
- processor = cases.process,
-}
+local m = 0 -- a trick to make neighbouring ranges work
-function cases.enable()
- tasks.enableaction("processors","chars.handle_casing")
+function cases.set(n)
+ if trace_casing then
+ report_casing("enabling case handler")
+ end
+ tasks.enableaction("processors","typesetting.cases.handler")
+ function cases.set(n)
+ if m == 100 then
+ m = 1
+ else
+ m = m + 1
+ end
+ tex.attribute[a_cases] = m * 100 + n
+ end
+ cases.set(n)
end
+
+cases.handler = nodes.install_attribute_handler {
+ name = "case",
+ namespace = cases,
+ processor = process,
+}
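The m * 100 + n construction above deserves a short illustration: the low two digits select the casing action, the counter on top only makes neighbouring or nested ranges carry distinct attribute values so they get processed separately. A decoding sketch matching the attr%100 / div(attr,100) pair used in the processor (the attribute values are illustrative):

    local div = math.div   -- ConTeXt's integer division helper, also used above

    local function decode(attr)
        return attr % 100, div(attr,100)   -- action number, instance counter
    end

    print(decode(302))   -- 2, 3 : third range, action 2 (lowercasing, cf. actions[2] above)
    print(decode(402))   -- 2, 4 : the next range, same action, different attribute value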
diff --git a/tex/context/base/typo-cap.mkiv b/tex/context/base/typo-cap.mkiv
index af4e12bc2..aa6683ca1 100644
--- a/tex/context/base/typo-cap.mkiv
+++ b/tex/context/base/typo-cap.mkiv
@@ -51,10 +51,8 @@
% test \word{test TEST \TeX} test
% test \Word{test TEST \TeX} test
-\unexpanded\def\setcharactercasing
- {\ctxlua{cases.enable()}%
- \gdef\setcharactercasing[##1]{\attribute\caseattribute##1\relax}%
- \setcharactercasing}
+\unexpanded\def\setcharactercasing[#1]%
+ {\ctxlua{typesetting.cases.set(\number#1)}}
% todo: names casings
diff --git a/tex/context/base/typo-dig.lua b/tex/context/base/typo-dig.lua
index c1b44e39f..9ee57a794 100644
--- a/tex/context/base/typo-dig.lua
+++ b/tex/context/base/typo-dig.lua
@@ -6,11 +6,16 @@ if not modules then modules = { } end modules ['typo-dig'] = {
license = "see context related readme files"
}
+-- we might consider doing this after the otf pass because now osf do not work
+-- out well in node mode.
+
local next, type = next, type
local format, insert = string.format, table.insert
-local round = math.round
+local round, div = math.round, math.div
+
+local trace_digits = false trackers.register("typesetting.digits", function(v) trace_digits = v end)
-local trace_digits = false trackers.register("nodes.digits", function(v) trace_digits = v end)
+local report_digits = logs.new("digits")
local has_attribute = node.has_attribute
local unset_attribute = node.unset_attribute
@@ -30,24 +35,30 @@ local chardata = fonts.characters
local quaddata = fonts.quads
local charbase = characters.data
-digits = digits or { }
+typesetting = typesetting or { }
+typesetting.digits = typesetting.digits or { }
+
+local digits = typesetting.digits
+
digits.actions = { }
digits.attribute = attributes.private("digits")
+local a_digits = digits.attribute
+
local actions = digits.actions
-- at some point we can manipulate the glyph node so then i need
--- to rewrite this
+-- to rewrite this then
-function nodes.aligned(start,stop,width,how)
- local prv, nxt, head = start.prev, stop.next, nil
- start.prev, stop.next = nil, nil
+function nodes.aligned(head,start,stop,width,how)
if how == "flushright" or how == "middle" then
- head, start = insert_before(start,start,new_glue(0,65536,65536))
+ head, start = insert_before(head,start,new_glue(0,65536,65536))
end
if how == "flushleft" or how == "middle" then
- head, stop = insert_after(start,stop,new_glue(0,65536,65536))
+ head, stop = insert_after(head,stop,new_glue(0,65536,65536))
end
+ local prv, nxt = start.prev, stop.next
+ start.prev, stop.next = nil, nil
local packed = hpack_node(start,width,"exactly") -- no directional mess here, just lr
if prv then
prv.next, packed.prev = packed, prv
@@ -55,38 +66,45 @@ function nodes.aligned(start,stop,width,how)
if nxt then
nxt.prev, packed.next = packed, nxt
end
- return packed, prv, nxt
+ if packed.prev then
+ return head, packed
+ else
+ return packed, packed
+ end
end
-actions[1] = function(start,attribute)
+actions[1] = function(head,start,attribute,attr)
+ local font = start.font
local char = start.char
- if charbase[char].category == "nd" then
- local font = start.font
+ local unic = chardata[font][char].tounicode
+ local what = unic and tonumber(unic,16) or char
+ if charbase[what].category == "nd" then
local oldwidth, newwidth = start.width, fonts.get_digit_width(font)
if newwidth ~= oldwidth then
- local start = nodes.aligned(start,start,newwidth,"middle") -- return three node pointers
- return start, true
+ if trace_digits then
+ report_digits("digit trigger %s, instance %s, char 0x%05X, unicode 0x%05X, delta %s",
+ attr%100,div(attr,100),char,what,newwidth-oldwidth)
+ end
+ head, start = nodes.aligned(head,start,start,newwidth,"middle")
+ return head, start, true
end
end
- return start, false
+ return head, start, false
end
-function digits.process(namespace,attribute,head)
+local function process(namespace,attribute,head)
local done, current, ok = false, head, false
while current do
if current.id == glyph then
local attr = has_attribute(current,attribute)
if attr and attr > 0 then
unset_attribute(current,attribute)
- local action = actions[attr]
+ local action = actions[attr%100] -- map back to low number
if action then
- if current == head then
- head, ok = action(current,attribute)
- current = head
- else
- current, ok = action(current,attribute)
- end
+ head, current, ok = action(head,current,attribute,attr)
+ done = done or ok
+ elseif trace_digits then
+ report_digits("unknown digit trigger %s",attr)
end
end
end
@@ -95,12 +113,26 @@ function digits.process(namespace,attribute,head)
return head, done
end
-chars.handle_digits = nodes.install_attribute_handler {
- name = "digits",
- namespace = digits,
- processor = digits.process,
-}
+local m = 0 -- a trick to make neighbouring ranges work
-function digits.enable()
- tasks.enableaction("processors","chars.handle_digits")
+function digits.set(n)
+ if trace_digits then
+ report_digits("enabling digit handler")
+ end
+ tasks.enableaction("processors","typesetting.digits.handler")
+ function digits.set(n)
+ if m == 100 then
+ m = 1
+ else
+ m = m + 1
+ end
+ tex.attribute[a_digits] = m * 100 + n
+ end
+ digits.set(n)
end
+
+digits.handler = nodes.install_attribute_handler {
+ name = "digits",
+ namespace = digits,
+ processor = process,
+}
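Note the changed contract of nodes.aligned: it now receives the list head and returns the (possibly new) head together with the packed box, so callers no longer have to special-case alignment at the start of the list. A sketch of old versus new calling convention (the node variables are whatever the caller happens to be traversing):

    -- old: packed, prv, nxt = nodes.aligned(start,stop,width,how)
    -- new:
    head, current = nodes.aligned(head,current,current,newwidth,"middle")
    -- 'head' is updated when the packed hbox ends up at the front of the list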
diff --git a/tex/context/base/typo-dig.mkiv b/tex/context/base/typo-dig.mkiv
index d8f731418..df00d9cef 100644
--- a/tex/context/base/typo-dig.mkiv
+++ b/tex/context/base/typo-dig.mkiv
@@ -24,26 +24,22 @@
%D \macros
%D {\equaldigits}
%D
-%D \starttyping
+%D \startbuffer
%D test test \ruledhbox{123} test test\par
%D test test \ruledhbox{\equaldigits{123}} test test\par
%D test test \equaldigits{123} test test\par
-%D \stoptyping
+%D \stopbuffer
%D
%D \typebuffer
%D
%D These calls result in:
%D
-%D \startvoorbeeld
%D \startlines
%D \getbuffer
%D \stoplines
-%D \stopvoorbeeld
-\unexpanded\def\setdigitsmanipulation
- {\ctxlua{digits.enable()}%
- \gdef\setdigitsmanipulation[##1]{\attribute\digitsattribute##1\relax}%
- \setdigitsmanipulation}
+\unexpanded\def\setdigitsmanipulation[#1]%
+ {\ctxlua{typesetting.digits.set(\number#1)}}
\unexpanded\def\equaldigits{\groupedcommand{\setdigitsmanipulation[\plusone]}{}}
\unexpanded\def\dummydigit {\hphantom{\setdigitsmanipulation[\plusone]0}}
diff --git a/tex/context/base/typo-mir.lua b/tex/context/base/typo-mir.lua
index 6c119c2f2..435400ceb 100644
--- a/tex/context/base/typo-mir.lua
+++ b/tex/context/base/typo-mir.lua
@@ -16,6 +16,8 @@ local utfchar = utf.char
local trace_mirroring = false trackers.register("nodes.mirroring", function(v) trace_mirroring = v end)
+local report_bidi = logs.new("bidi")
+
local has_attribute = node.has_attribute
local unset_attribute = node.unset_attribute
local set_attribute = node.set_attribute
@@ -330,7 +332,7 @@ function mirroring.process(namespace,attribute,start) -- todo: make faster
else
autodir = 1
end
- embeddded = autodir
+ embedded = autodir
if trace_mirroring then
list[#list+1] = format("pardir %s",dir)
end
@@ -374,11 +376,11 @@ function mirroring.process(namespace,attribute,start) -- todo: make faster
end
end
if trace_mirroring and glyphs then
- logs.report("bidi","start log")
+ report_bidi("start log")
for i=1,#list do
- logs.report("bidi","%02i: %s",i,list[i])
+ report_bidi("%02i: %s",i,list[i])
end
- logs.report("bidi","stop log")
+ report_bidi("stop log")
end
if done and mirroring.strip then
local n = #obsolete
@@ -386,7 +388,7 @@ function mirroring.process(namespace,attribute,start) -- todo: make faster
for i=1,n do
remove_node(head,obsolete[i],true)
end
- logs.report("bidi","%s character nodes removed",n)
+ report_bidi("%s character nodes removed",n)
end
end
return head, done
diff --git a/tex/context/base/typo-rep.lua b/tex/context/base/typo-rep.lua
index 6fde21482..c70e90f27 100644
--- a/tex/context/base/typo-rep.lua
+++ b/tex/context/base/typo-rep.lua
@@ -13,6 +13,8 @@ if not modules then modules = { } end modules ['typo-rep'] = {
local trace_stripping = false trackers.register("nodes.stripping", function(v) trace_stripping = v end)
trackers.register("fonts.stripping", function(v) trace_stripping = v end)
+local report_fonts = logs.new("fonts")
+
local delete_node = nodes.delete
local replace_node = nodes.replace
local copy_node = node.copy
@@ -43,20 +45,20 @@ end
local function process(what,head,current,char)
if what == true then
if trace_stripping then
- logs.report("fonts","deleting 0x%05X from text",char)
+ report_fonts("deleting 0x%05X from text",char)
end
head, current = delete_node(head,current)
elseif type(what) == "function" then
head, current = what(head,current)
current = current.next
if trace_stripping then
- logs.report("fonts","processing 0x%05X in text",char)
+ report_fonts("processing 0x%05X in text",char)
end
elseif what then -- assume node
head, current = replace_node(head,current,copy_node(what))
current = current.next
if trace_stripping then
- logs.report("fonts","replacing 0x%05X in text",char)
+ report_fonts("replacing 0x%05X in text",char)
end
end
return head, current
diff --git a/tex/context/base/typo-rep.mkiv b/tex/context/base/typo-rep.mkiv
index 2f1d8b4cb..123ab8830 100644
--- a/tex/context/base/typo-rep.mkiv
+++ b/tex/context/base/typo-rep.mkiv
@@ -15,8 +15,8 @@
% experimental stripping
-%D For a while we had stripping built into the analyzer. Khaled
-%D suggested to generalize this so I changed the code into a
+%D For a while we had stripping of special chars built into the analyzer
+%D but Khaled suggested to generalize this so I changed the code into a
%D manipulator there.
%D
%D \starttyping
diff --git a/tex/context/base/typo-spa.lua b/tex/context/base/typo-spa.lua
index 48c7263c7..72e7c7afa 100644
--- a/tex/context/base/typo-spa.lua
+++ b/tex/context/base/typo-spa.lua
@@ -15,6 +15,8 @@ local utfchar = utf.char
local trace_hspacing = false trackers.register("nodes.hspacing", function(v) trace_hspacing = v end)
+local report_spacing = logs.new("spacing")
+
local has_attribute = node.has_attribute
local unset_attribute = node.unset_attribute
local insert_node_before = node.insert_before
@@ -73,7 +75,7 @@ function spacings.process(namespace,attribute,head)
local somepenalty = nodes.somepenalty(prevprev,10000)
if somepenalty then
if trace_hspacing then
- logs.report("spacing","removing penalty and space before %s", utfchar(start.char))
+ report_spacing("removing penalty and space before %s", utfchar(start.char))
end
head, _ = remove_node(head,prev,true)
head, _ = remove_node(head,prevprev,true)
@@ -81,7 +83,7 @@ function spacings.process(namespace,attribute,head)
local somespace = nodes.somespace(prev,true)
if somespace then
if trace_hspacing then
- logs.report("spacing","removing space before %s", utfchar(start.char))
+ report_spacing("removing space before %s", utfchar(start.char))
end
head, _ = remove_node(head,prev,true)
end
@@ -93,7 +95,7 @@ function spacings.process(namespace,attribute,head)
end
if ok then
if trace_hspacing then
- logs.report("spacing","inserting penalty and space before %s", utfchar(start.char))
+ report_spacing("inserting penalty and space before %s", utfchar(start.char))
end
insert_node_before(head,start,make_penalty_node(10000))
insert_node_before(head,start,make_glue_node(tex.scale(quad,left)))
@@ -110,7 +112,7 @@ function spacings.process(namespace,attribute,head)
local somespace = nodes.somespace(nextnext,true)
if somespace then
if trace_hspacing then
- logs.report("spacing","removing penalty and space after %s", utfchar(start.char))
+ report_spacing("removing penalty and space after %s", utfchar(start.char))
end
head, _ = remove_node(head,next,true)
head, _ = remove_node(head,nextnext,true)
@@ -119,7 +121,7 @@ function spacings.process(namespace,attribute,head)
local somespace = nodes.somespace(next,true)
if somespace then
if trace_hspacing then
- logs.report("spacing","removing space after %s", utfchar(start.char))
+ report_spacing("removing space after %s", utfchar(start.char))
end
head, _ = remove_node(head,next,true)
end
@@ -130,7 +132,7 @@ function spacings.process(namespace,attribute,head)
end
if ok then
if trace_hspacing then
- logs.report("spacing","inserting penalty and space after %s", utfchar(start.char))
+ report_spacing("inserting penalty and space after %s", utfchar(start.char))
end
insert_node_after(head,start,make_glue_node(tex.scale(quad,right)))
insert_node_after(head,start,make_penalty_node(10000))
diff --git a/tex/context/base/x-asciimath.lua b/tex/context/base/x-asciimath.lua
index c2b15e313..d278b0d6a 100644
--- a/tex/context/base/x-asciimath.lua
+++ b/tex/context/base/x-asciimath.lua
@@ -12,6 +12,8 @@ if not modules then modules = { } end modules ['x-asciimath'] = {
local trace_mapping = false if trackers then trackers.register("asciimath.mapping", function(v) trace_mapping = v end) end
+local report_asciimath = logs.new("asciimath")
+
local format = string.format
local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
local lpegmatch = lpeg.match
@@ -164,22 +166,22 @@ local parser
local function converted(original,totex)
local ok, result
if trace_mapping then
- logs.report("asciimath","original : %s",original)
+ report_asciimath("original : %s",original)
end
local premapped = lpegmatch(premapper,original)
if premapped then
if trace_mapping then
- logs.report("asciimath","prepared : %s",premapped)
+ report_asciimath("prepared : %s",premapped)
end
local parsed = lpegmatch(parser,premapped)
if parsed then
if trace_mapping then
- logs.report("asciimath","parsed : %s",parsed)
+ report_asciimath("parsed : %s",parsed)
end
local postmapped = lpegmatch(postmapper,parsed)
if postmapped then
if trace_mapping then
- logs.report("asciimath","finalized : %s",postmapped)
+ report_asciimath("finalized : %s",postmapped)
end
result, ok = postmapped, true
else
diff --git a/tex/context/base/x-asciimath.mkiv b/tex/context/base/x-asciimath.mkiv
index c9252408d..cc0e66e99 100644
--- a/tex/context/base/x-asciimath.mkiv
+++ b/tex/context/base/x-asciimath.mkiv
@@ -13,7 +13,7 @@
%D Lua code.
-\ctxloadluafile{x-asciimath}{}
+\registerctxluafile{x-asciimath}{}
%D The following code is not officially supported and is only meant
%D for the Math4All project.
diff --git a/tex/context/base/x-calcmath.lua b/tex/context/base/x-calcmath.lua
index e4d5da139..27ad56f58 100644
--- a/tex/context/base/x-calcmath.lua
+++ b/tex/context/base/x-calcmath.lua
@@ -9,11 +9,12 @@ if not modules then modules = { } end modules ['x-calcmath'] = {
local format, lower, upper, gsub, sub = string.format, string.lower, string.upper, string.gsub, string.sub
local lpegmatch = lpeg.match
-tex = tex or { }
+local texsprint = (tex and tex.sprint) or function(catcodes,str) print(str) end
-texsprint = tex.sprint or function(catcodes,str) print(str) end
+moduledata = moduledata or { }
+moduledata.calcmath = moduledata.calcmath or { }
-calcmath = { }
+local calcmath = moduledata.calcmath
local list_1 = {
"median", "min", "max", "round", "ln", "log",
diff --git a/tex/context/base/x-calcmath.mkiv b/tex/context/base/x-calcmath.mkiv
index c726843fa..f271a7849 100644
--- a/tex/context/base/x-calcmath.mkiv
+++ b/tex/context/base/x-calcmath.mkiv
@@ -13,14 +13,14 @@
%D Lua code.
-\ctxloadluafile{x-calcmath}{}
+\registerctxluafile{x-calcmath}{}
%D Interface:
\unprotect
-\def\inlinecalcmath #1{\mathematics{\ctxlua{calcmath.tex("#1",1)}}}
-\def\displaycalcmath#1{\startformula\ctxlua{calcmath.tex("#1",2)}\stopformula}
+\def\inlinecalcmath #1{\mathematics{\ctxlua{moduledata.calcmath.tex("#1",1)}}}
+\def\displaycalcmath#1{\startformula\ctxlua{moduledata.calcmath.tex("#1",2)}\stopformula}
\let\calcmath\inlinecalcmath
@@ -37,24 +37,24 @@
\xmlregistersetup{xml:cam:define}
% tex -> lua -> tex -> lua -> tex
-% \mathematics{\ctxlua{calcmath.xml(\!!bs\xmlflush{#1}\!!es,1)}}
+% \mathematics{\ctxlua{moduledata.calcmath.xml(\!!bs\xmlflush{#1}\!!es,1)}}
% tex -> lua -> tex
-% \mathematics{\ctxlua{calcmath.xml("#1",1)}}%
+% \mathematics{\ctxlua{moduledata.calcmath.xml("#1",1)}}%
\startxmlsetups cam:i
- \mathematics{\ctxlua{calcmath.xml("#1",1)}}%
+ \mathematics{\ctxlua{moduledata.calcmath.xml("#1",1)}}%
\stopxmlsetups
\startxmlsetups cam:d
- \startformula\ctxlua{calcmath.xml("#1",2)}\stopformula
+ \startformula\ctxlua{moduledata.calcmath.xml("#1",2)}\stopformula
\stopxmlsetups
\startxmlsetups cam:icm
- \mathematics{\ctxlua{calcmath.xml("#1",1)}}
+ \mathematics{\ctxlua{moduledata.calcmath.xml("#1",1)}}
\stopxmlsetups
\startxmlsetups cam:dcm
- \startformula\ctxlua{calcmath.xml("#1",2)}\stopformula
+ \startformula\ctxlua{moduledata.calcmath.xml("#1",2)}\stopformula
\stopxmlsetups
\protect \endinput
diff --git a/tex/context/base/x-cals.mkiv b/tex/context/base/x-cals.mkiv
index 32d5767c1..3e37048c9 100644
--- a/tex/context/base/x-cals.mkiv
+++ b/tex/context/base/x-cals.mkiv
@@ -15,7 +15,7 @@
\startmodule [cals]
-\ctxloadluafile{x-cals}{}
+\registerctxluafile{x-cals}{}
% \startxmlsetups xml:cals:process
% \xmlsetsetup {\xmldocument} {cals:table} {*}
diff --git a/tex/context/base/x-ct.mkiv b/tex/context/base/x-ct.mkiv
index d282193a1..ad20eea05 100644
--- a/tex/context/base/x-ct.mkiv
+++ b/tex/context/base/x-ct.mkiv
@@ -15,7 +15,7 @@
\startmodule [ct]
-\ctxloadluafile{x-ct}{}
+\registerctxluafile{x-ct}{}
\startxmlsetups xml:context:process
\xmlsetfunction {\xmldocument} {context:tabulate} {lxml.context.tabulate}
diff --git a/tex/context/base/x-mathml.mkiv b/tex/context/base/x-mathml.mkiv
index a5245c835..4d093a463 100644
--- a/tex/context/base/x-mathml.mkiv
+++ b/tex/context/base/x-mathml.mkiv
@@ -21,7 +21,7 @@
\startmodule [mathml]
-\ctxloadluafile{x-mathml}{}
+\registerctxluafile{x-mathml}{}
\startxmlsetups xml:mml:define
\xmlsetsetup{\xmldocument} {(formula|subformula)} {mml:formula}
diff --git a/tex/context/base/x-set-11.mkiv b/tex/context/base/x-set-11.mkiv
index 784df3113..5e96436d7 100644
--- a/tex/context/base/x-set-11.mkiv
+++ b/tex/context/base/x-set-11.mkiv
@@ -12,6 +12,8 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% we can make this module a bit cleaner using more recent features
+
% \startluacode
% collectgarbage("stop")
% function collectgarbage() return 0 end
@@ -97,7 +99,7 @@
\let\currentSETUPfullname\s!unknown
\startxmlsetups xml:setups:assemblename
- \doifelse {\xmlatt{#1}{environment}} {yes} {
+ \doifelse {\xmlatt{#1}{type}} {environment} {
\let\currentSETUPprefix\e!start
} {
\let\currentSETUPprefix\empty
@@ -153,8 +155,8 @@
\newif\ifshortsetup
\unexpanded\def\setup {\shortsetupfalse\doshowsetup}
-\def\showsetup {\shortsetupfalse\doshowsetup}
-\def\shortsetup{\shortsetuptrue \doshowsetup}
+\unexpanded\def\showsetup {\shortsetupfalse\doshowsetup}
+\unexpanded\def\shortsetup{\shortsetuptrue \doshowsetup}
\unexpanded\def\setupsetup{\dodoubleargument\getparameters[\??stp]}
%unexpanded\def\showsetupinlist#1#2#3{\shortsetupfalse\showsetupindeed{#3}\par}
@@ -176,8 +178,11 @@
{\registersort[texcommand][stp:x:#1]%
\showsetupindeed{#1}}
+% \def\showsetupindeed#1%
+% {\xmlfilterlist{\loadedsetups}{/interface/command[@name='#1']/command(xml:setups:typeset)}}
+
\def\showsetupindeed#1%
- {\xmlfilterlist{\loadedsetups}{/interface/command[@name='#1']/command(xml:setups:typeset)}}
+ {\xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}}
\unexpanded\def\placesetup {\placelistofsorts[texcommand][\c!criterium=\v!used]}
\unexpanded\def\placeallsetups{\placelistofsorts[texcommand][\c!criterium=\v!all ]}
@@ -231,7 +236,7 @@
\ttsl
}
\tex{\e!stop}
- \xmlfilter{#1}{/sequence/variable/first()}
+ \xmlfilter{#1}{/sequence/first()}
\ignorespaces
\egroup
}
diff --git a/tex/context/fonts/informal-math.lfg b/tex/context/fonts/informal-math.lfg
index 67fb73b39..a1f461740 100644
--- a/tex/context/fonts/informal-math.lfg
+++ b/tex/context/fonts/informal-math.lfg
@@ -9,13 +9,13 @@ return {
"original-micropress-informal.map",
},
virtuals = {
- ["hvmath-math"] = {
+ ["informal-math"] = {
{ name = "file:ifrg.afm", features = "virtualmath", main = true },
- { name = "ifrm10cm.tfm", vector="tex-mr" },
- { name = "ifmi10", vector = "tex-mi", skewchar=0x7F },
- { name = "ifmi10.tfm", vector = "tex-it", skewchar=0x7F },
- { name = "ifsy10.tfm", vector = "tex-sy", skewchar=0x30, parameters = true },
- { name = "ifex10.tfm", vector = "tex-ex", extension = true },
+ { name = "file:ifrm10cm.tfm", vector="tex-mr" },
+ { name = "file:ifmi10.tfm", vector = "tex-mi", skewchar=0x7F },
+ { name = "file:ifmi10.tfm", vector = "tex-it", skewchar=0x7F },
+ { name = "file:ifsy10.tfm", vector = "tex-sy", skewchar=0x30, parameters = true },
+ { name = "file:ifex10.tfm", vector = "tex-ex", extension = true },
}
}
}
diff --git a/tex/context/fonts/lm-math.lfg b/tex/context/fonts/lm-math.lfg
index 361b5bb86..7b3bfe6e0 100644
--- a/tex/context/fonts/lm-math.lfg
+++ b/tex/context/fonts/lm-math.lfg
@@ -14,6 +14,7 @@ local five = {
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam5.tfm", vector = "tex-ma" },
{ name = "msbm5.tfm", vector = "tex-mb" },
+ { name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx5.tfm", vector = "tex-bf" } ,
{ name = "lmroman5-bold", vector = "tex-bf" } ,
{ name = "lmmib5.tfm", vector = "tex-bi", skewchar=0x7F } ,
@@ -36,6 +37,7 @@ local six = {
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam5.tfm", vector = "tex-ma" },
{ name = "msbm5.tfm", vector = "tex-mb" },
+ { name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx6.tfm", vector = "tex-bf" } ,
{ name = "lmroman6-bold.otf", vector = "tex-bf" } ,
{ name = "lmmib5.tfm", vector = "tex-bi", skewchar=0x7F } ,
@@ -61,6 +63,7 @@ local seven = {
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam7.tfm", vector = "tex-ma" },
{ name = "msbm7.tfm", vector = "tex-mb" },
+ { name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx7.tfm", vector = "tex-bf" } ,
{ name = "lmroman7-bold.otf", vector = "tex-bf" } ,
{ name = "lmmib7.tfm", vector = "tex-bi", skewchar=0x7F } ,
@@ -84,6 +87,7 @@ local eight = {
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam7.tfm", vector = "tex-ma" },
{ name = "msbm7.tfm", vector = "tex-mb" },
+ { name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx8.tfm", vector = "tex-bf" } ,
{ name = "lmroman8-bold.otf", vector = "tex-bf" } ,
{ name = "lmmib7.tfm", vector = "tex-bi", skewchar=0x7F } ,
@@ -107,6 +111,7 @@ local nine = {
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam10.tfm", vector = "tex-ma" },
{ name = "msbm10.tfm", vector = "tex-mb" },
+ { name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx9.tfm", vector = "tex-bf" } ,
{ name = "lmroman9-bold.otf", vector = "tex-bf" } ,
{ name = "lmmib10.tfm", vector = "tex-bi", skewchar=0x7F } ,
@@ -133,6 +138,7 @@ local ten = {
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam10.tfm", vector = "tex-ma" },
{ name = "msbm10.tfm", vector = "tex-mb" },
+ { name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx10.tfm", vector = "tex-bf" } ,
{ name = "lmroman10-bold.otf", vector = "tex-bf" } ,
{ name = "lmmib10.tfm", vector = "tex-bi", skewchar=0x7F } ,
@@ -152,6 +158,7 @@ local ten_bold = {
-- copied from roman:
{ name = "msam10.tfm", vector = "tex-ma" },
{ name = "msbm10.tfm", vector = "tex-mb" },
+ { name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx10.tfm", vector = "tex-bf" } ,
{ name = "lmroman10-bold.otf", vector = "tex-bf" } ,
{ name = "lmmib10.tfm", vector = "tex-bi", skewchar=0x7F } ,
@@ -174,6 +181,7 @@ local twelve = {
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam10.tfm", vector = "tex-ma" },
{ name = "msbm10.tfm", vector = "tex-mb" },
+ { name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx12.tfm", vector = "tex-bf" } ,
{ name = "lmroman12-bold.otf", vector = "tex-bf" } ,
{ name = "lmmib10.tfm", vector = "tex-bi", skewchar=0x7F } ,
@@ -194,6 +202,7 @@ local seventeen = {
{ name = "lmex10.tfm", vector = "tex-ex", extension = true } ,
{ name = "msam10.tfm", vector = "tex-ma" },
{ name = "msbm10.tfm", vector = "tex-mb" },
+ { name = "stmary10.afm", vector = "tex-mc" },
-- { name = "rm-lmbx12.tfm", vector = "tex-bf" } ,
{ name = "lmroman12-bold.otf", vector = "tex-bf" } ,
{ name = "lmmib10.tfm", vector = "tex-bi", skewchar=0x7F } ,
diff --git a/tex/context/patterns/lang-it.pat b/tex/context/patterns/lang-it.pat
index 8e7ed87eb..9c0bffc6e 100644
--- a/tex/context/patterns/lang-it.pat
+++ b/tex/context/patterns/lang-it.pat
@@ -78,6 +78,7 @@ b2r
2cz
2chh
c2h
+2ch.
2chb
ch2r
2chn
@@ -159,6 +160,7 @@ k2r
2l3f2
2lg
l2h
+l2j
2lk
2ll
2lm
diff --git a/tex/context/sample/khatt-ar.tex b/tex/context/sample/khatt-ar.tex
new file mode 100644
index 000000000..c91426411
--- /dev/null
+++ b/tex/context/sample/khatt-ar.tex
@@ -0,0 +1,4 @@
+قَالَ عَلِيُّ بْنُ أَبِي طَالِبٍ لِكَاتِبِهِ عُبَيْدِ اللّٰهِ بْنِ
+أَبِي رَافِعٍ: أَلِقْ دَوَاتَكَ، وَ أَطِلْ جِلْفَةَ قَلَمِكَ، وَ فَرِّجْ
+بَيْنَ السُّطُورِ، وَ قَرْمِطْ بَيْنَ الْحُرُوفِ؛ فَإِنَّ ذَلِكَ أَجْدَرُ
+بِصَبَاحَةِ الْخَطِّ.
diff --git a/tex/context/sample/khatt-en.tex b/tex/context/sample/khatt-en.tex
new file mode 100644
index 000000000..f994513e7
--- /dev/null
+++ b/tex/context/sample/khatt-en.tex
@@ -0,0 +1,4 @@
+ʿAlī ibn Abī Ṭālib said to his scribe ʿUbaydullāh ibn Abī Rāfiʿ:
+your inkwell before you, sharpen the edge of your pen, make sure
+there is open space between the lines, and set your letter|-|spacing
+closely. Now {\em that} is the way to make the script shine!
diff --git a/tex/generic/context/luatex-fonts-merged.lua b/tex/generic/context/luatex-fonts-merged.lua
index da81735ff..5a5aba776 100644
--- a/tex/generic/context/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 05/24/10 13:05:12
+-- merge date : 06/23/10 12:45:11
do -- begin closure to overcome local limits and interference
@@ -347,6 +347,11 @@ patterns.whitespace = patterns.eol + patterns.spacer
patterns.nonwhitespace = 1 - patterns.whitespace
patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191')
+patterns.validutf8 = patterns.utf8^0 * P(-1) * Cc(true) + Cc(false)
+
+patterns.undouble = P('"')/"" * (1-P('"'))^0 * P('"')/""
+patterns.unsingle = P("'")/"" * (1-P("'"))^0 * P("'")/""
+patterns.unspacer = ((patterns.spacer^1)/"")^0
function lpeg.anywhere(pattern) --slightly adapted from website
return P { P(pattern) + 1 * V(1) } -- why so complex?
@@ -463,6 +468,64 @@ local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 6
patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+local cache = { }
+
+function lpeg.stripper(str)
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+end
+
+function lpeg.replacer(t)
+ if #t > 0 then
+ local p
+ for i=1,#t do
+ local ti= t[i]
+ local pp = P(ti[1]) / ti[2]
+ p = (p and p + pp ) or pp
+ end
+ return Cs((p + 1)^0)
+ end
+end
+
+--~ print(utf.check(""))
+--~ print(utf.check("abcde"))
+--~ print(utf.check("abcde\255\123"))
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(P(1)^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+--~ print(1,match(lpeg.firstofsplit(":"),"bc:de"))
+--~ print(2,match(lpeg.firstofsplit(":"),":de")) -- empty
+--~ print(3,match(lpeg.firstofsplit(":"),"bc"))
+--~ print(4,match(lpeg.secondofsplit(":"),"bc:de"))
+--~ print(5,match(lpeg.secondofsplit(":"),"bc:")) -- empty
+--~ print(6,match(lpeg.secondofsplit(":",""),"bc"))
+--~ print(7,match(lpeg.secondofsplit(":"),"bc"))
+--~ print(9,match(lpeg.secondofsplit(":","123"),"bc"))
+
end -- closure
do -- begin closure to overcome local limits and interference
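A usage sketch for the lpeg helpers added above (plain Lua plus lpeg; the input strings are illustrative):

    local lpegmatch = lpeg.match

    print(lpegmatch(lpeg.stripper("+- "),"+- 12 - 3"))          -- "123"
    print(lpegmatch(lpeg.replacer{{"a","A"},{"b","B"}},"abc"))  -- "ABc"
    print(lpegmatch(lpeg.firstofsplit(":"),"bc:de"))            -- "bc"
    print(lpegmatch(lpeg.secondofsplit(":"),"bc:de"))           -- "de"
    print(lpegmatch(lpeg.secondofsplit(":"),"bc"))              -- nil (no separator found)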
@@ -504,7 +567,7 @@ function toboolean(str,tolerant)
end
end
-function string.is_boolean(str)
+function string.is_boolean(str,default)
if type(str) == "string" then
if str == "true" or str == "yes" or str == "on" or str == "t" then
return true
@@ -512,7 +575,7 @@ function string.is_boolean(str)
return false
end
end
- return nil
+ return default
end
function boolean.alwaystrue()
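The extra argument to string.is_boolean only changes what comes back for unrecognized input; a quick sketch:

    print(string.is_boolean("yes"))           -- true
    print(string.is_boolean("maybe"))         -- nil (as before)
    print(string.is_boolean("maybe",true))    -- true (the new explicit default)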
@@ -1180,7 +1243,7 @@ local function serialize(root,name,_handle,_reduce,_noquotes,_hexify)
handle("t={")
end
if root and next(root) then
- do_serialize(root,name,"",0,indexed)
+ do_serialize(root,name,"",0)
end
handle("}")
end
@@ -1483,6 +1546,17 @@ function table.insert_after_value(t,value,extra)
insert(t,#t+1,extra)
end
+function table.sequenced(t,sep)
+ local s = { }
+ for k, v in next, t do -- indexed?
+ s[#s+1] = k .. "=" .. tostring(v)
+ end
+ return concat(s, sep or " | ")
+end
+
+function table.print(...)
+ print(table.serialize(...))
+end
end -- closure
@@ -1500,45 +1574,88 @@ if not modules then modules = { } end modules ['l-file'] = {
file = file or { }
-local concat = table.concat
+local insert, concat = table.insert, table.concat
local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char
local lpegmatch = lpeg.match
+local getcurrentdir = lfs.currentdir
-function file.removesuffix(filename)
- return (gsub(filename,"%.[%a%d]+$",""))
+local function dirname(name,default)
+ return match(name,"^(.+)[/\\].-$") or (default or "")
end
-function file.addsuffix(filename, suffix)
- if not suffix or suffix == "" then
- return filename
- elseif not find(filename,"%.[%a%d]+$") then
- return filename .. "." .. suffix
- else
- return filename
- end
+local function basename(name)
+ return match(name,"^.+[/\\](.-)$") or name
end
-function file.replacesuffix(filename, suffix)
- return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+local function nameonly(name)
+ return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
end
-function file.dirname(name,default)
- return match(name,"^(.+)[/\\].-$") or (default or "")
+local function extname(name,default)
+ return match(name,"^.+%.([^/\\]-)$") or default or ""
end
-function file.basename(name)
- return match(name,"^.+[/\\](.-)$") or name
+local function splitname(name)
+ local n, s = match(name,"^(.+)%.([^/\\]-)$")
+ return n or name, s or ""
end
-function file.nameonly(name)
- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
+file.basename = basename
+file.dirname = dirname
+file.nameonly = nameonly
+file.extname = extname
+file.suffix = extname
+
+function file.removesuffix(filename)
+ return (gsub(filename,"%.[%a%d]+$",""))
end
-function file.extname(name,default)
- return match(name,"^.+%.([^/\\]-)$") or default or ""
+function file.addsuffix(filename, suffix, criterium)
+ if not suffix or suffix == "" then
+ return filename
+ elseif criterium == true then
+ return filename .. "." .. suffix
+ elseif not criterium then
+ local n, s = splitname(filename)
+ if not s or s == "" then
+ return filename .. "." .. suffix
+ else
+ return filename
+ end
+ else
+ local n, s = splitname(filename)
+ if s and s ~= "" then
+ local t = type(criterium)
+ if t == "table" then
+ -- keep if in criterium
+ for i=1,#criterium do
+ if s == criterium[i] then
+ return filename
+ end
+ end
+ elseif t == "string" then
+ -- keep if criterium
+ if s == criterium then
+ return filename
+ end
+ end
+ end
+ return n .. "." .. suffix
+ end
end
-file.suffix = file.extname
+--~ print("1 " .. file.addsuffix("name","new") .. " -> name.new")
+--~ print("2 " .. file.addsuffix("name.old","new") .. " -> name.old")
+--~ print("3 " .. file.addsuffix("name.old","new",true) .. " -> name.old.new")
+--~ print("4 " .. file.addsuffix("name.old","new","new") .. " -> name.new")
+--~ print("5 " .. file.addsuffix("name.old","new","old") .. " -> name.old")
+--~ print("6 " .. file.addsuffix("name.old","new","foo") .. " -> name.new")
+--~ print("7 " .. file.addsuffix("name.old","new",{"foo","bar"}) .. " -> name.new")
+--~ print("8 " .. file.addsuffix("name.old","new",{"old","bar"}) .. " -> name.old")
+
+function file.replacesuffix(filename, suffix)
+ return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+end
--~ function file.join(...)
--~ local pth = concat({...},"/")
@@ -1591,7 +1708,7 @@ end
--~ print(file.join("//nas-1","/y"))
function file.iswritable(name)
- local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,"."))
+ local a = lfs.attributes(name) or lfs.attributes(dirname(name,"."))
return a and sub(a.permissions,2,2) == "w"
end
@@ -1630,31 +1747,94 @@ end
-- we can hash them weakly
-function file.collapse_path(str)
+--~ function file.old_collapse_path(str) -- fails on b.c/..
+--~ str = gsub(str,"\\","/")
+--~ if find(str,"/") then
+--~ str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified
+--~ str = gsub(str,"/%./","/")
+--~ local n, m = 1, 1
+--~ while n > 0 or m > 0 do
+--~ str, n = gsub(str,"[^/%.]+/%.%.$","")
+--~ str, m = gsub(str,"[^/%.]+/%.%./","")
+--~ end
+--~ str = gsub(str,"([^/])/$","%1")
+--~ -- str = gsub(str,"^%./","") -- ./xx in qualified
+--~ str = gsub(str,"/%.$","")
+--~ end
+--~ if str == "" then str = "." end
+--~ return str
+--~ end
+--~
+--~ The previous one fails on "a.b/c", so Taco came up with a split-based
+--~ variant. After some skyping we got it sort of compatible with the old
+--~ one. After that the anchoring to currentdir was added in a better way.
+--~ Of course there are some optimizations too. Finally we had to deal with
+--~ Windows drive prefixes and things like sys://.
+
+function file.collapse_path(str,anchor)
+ if anchor and not find(str,"^/") and not find(str,"^%a:") then
+ str = getcurrentdir() .. "/" .. str
+ end
+ if str == "" or str =="." then
+ return "."
+ elseif find(str,"^%.%.") then
+ str = gsub(str,"\\","/")
+ return str
+ elseif not find(str,"%.") then
+ str = gsub(str,"\\","/")
+ return str
+ end
str = gsub(str,"\\","/")
- if find(str,"/") then
- str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified
- str = gsub(str,"/%./","/")
- local n, m = 1, 1
- while n > 0 or m > 0 do
- str, n = gsub(str,"[^/%.]+/%.%.$","")
- str, m = gsub(str,"[^/%.]+/%.%./","")
- end
- str = gsub(str,"([^/])/$","%1")
- -- str = gsub(str,"^%./","") -- ./xx in qualified
- str = gsub(str,"/%.$","")
- end
- if str == "" then str = "." end
- return str
+ local starter, rest = match(str,"^(%a+:/*)(.-)$")
+ if starter then
+ str = rest
+ end
+ local oldelements = checkedsplit(str,"/")
+ local newelements = { }
+ local i = #oldelements
+ while i > 0 do
+ local element = oldelements[i]
+ if element == '.' then
+ -- do nothing
+ elseif element == '..' then
+ local n = i -1
+ while n > 0 do
+ local element = oldelements[n]
+ if element ~= '..' and element ~= '.' then
+ oldelements[n] = '.'
+ break
+ else
+ n = n - 1
+ end
+ end
+ if n < 1 then
+ insert(newelements,1,'..')
+ end
+ elseif element ~= "" then
+ insert(newelements,1,element)
+ end
+ i = i - 1
+ end
+ if #newelements == 0 then
+ return starter or "."
+ elseif starter then
+ return starter .. concat(newelements, '/')
+ elseif find(str,"^/") then
+ return "/" .. concat(newelements,'/')
+ else
+ return concat(newelements, '/')
+ end
end
---~ print(file.collapse_path("/a"))
---~ print(file.collapse_path("a/./b/.."))
---~ print(file.collapse_path("a/aa/../b/bb"))
---~ print(file.collapse_path("a/../.."))
---~ print(file.collapse_path("a/.././././b/.."))
---~ print(file.collapse_path("a/./././b/.."))
---~ print(file.collapse_path("a/b/c/../.."))
+--~ local function test(str)
+--~ print(string.format("%-20s %-15s %-15s",str,file.collapse_path(str),file.collapse_path(str,true)))
+--~ end
+--~ test("a/b.c/d") test("b.c/d") test("b.c/..")
+--~ test("/") test("c:/..") test("sys://..")
+--~ test("") test("./") test(".") test("..") test("./..") test("../..")
+--~ test("a") test("./a") test("/a") test("a/../..")
+--~ test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..")
+--~ test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..")
function file.robustname(str)
return (gsub(str,"[^%a%d%/%-%.\\]+","-"))
@@ -2000,7 +2180,7 @@ end -- closure
do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['luat-dum'] = {
- version = 1.001,
+ version = 1.100,
comment = "companion to luatex-*.tex",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
@@ -2029,15 +2209,16 @@ experiments = {
enable = dummyfunction,
disable = dummyfunction,
}
-storage = {
+storage = { -- probably no longer needed
register = dummyfunction,
shared = { },
}
logs = {
+ new = function() return dummyfunction end,
report = dummyfunction,
simple = dummyfunction,
}
-tasks = {
+tasks = { -- no longer needed
new = dummyfunction,
actions = dummyfunction,
appendaction = dummyfunction,
@@ -2081,27 +2262,80 @@ end
-- usage as I don't want any dependency at all. Also, ConTeXt might have
-- different needs and tricks added.
+--~ containers.usecache = true
+
caches = { }
---~ containers.usecache = true
+local writable, readables = nil, { }
+
+if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then
+ caches.namespace = 'generic'
+end
+
+do
+
+ local cachepaths = kpse.expand_path('$TEXMFCACHE') or ""
-function caches.setpath(category,subcategory)
- local root = kpse.var_value("TEXMFCACHE") or ""
- if root == "" then
- root = kpse.var_value("VARTEXMF") or ""
+ if cachepaths == "" then
+ cachepaths = kpse.expand_path('$VARTEXMF')
end
- if root ~= "" then
- root = file.join(root,category)
- lfs.mkdir(root)
- root = file.join(root,subcategory)
- lfs.mkdir(root)
- return lfs.isdir(root) and root
+
+ if cachepaths == "" then
+ cachepaths = "."
end
+
+ cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":")
+
+ for i=1,#cachepaths do
+ if file.iswritable(cachepaths[i]) then
+ writable = file.join(cachepaths[i],"luatex-cache")
+ lfs.mkdir(writable)
+ writable = file.join(writable,caches.namespace)
+ lfs.mkdir(writable)
+ break
+ end
+ end
+
+ for i=1,#cachepaths do
+ if file.isreadable(cachepaths[i]) then
+ readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace)
+ end
+ end
+
+ if not writable then
+        texio.write_nl("quitting: fix your writable cache path")
+ os.exit()
+ elseif #readables == 0 then
+        texio.write_nl("quitting: fix your readable cache path")
+ os.exit()
+ elseif #readables == 1 and readables[1] == writable then
+ texio.write(string.format("(using cache: %s)",writable))
+ else
+ texio.write(string.format("(using write cache: %s)",writable))
+ texio.write(string.format("(using read cache: %s)",table.concat(readables, " ")))
+ end
+
+end
+
+function caches.getwritablepath(category,subcategory)
+ local path = file.join(writable,category)
+ lfs.mkdir(path)
+ path = file.join(path,subcategory)
+ lfs.mkdir(path)
+ return path
+end
+
+function caches.getreadablepaths(category,subcategory)
+ local t = { }
+ for i=1,#readables do
+ t[i] = file.join(readables[i],category,subcategory)
+ end
+ return t
end
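-- Illustrative sketch (not part of the patch); the prefix depends on the local
-- TEXMFCACHE / VARTEXMF settings and on caches.namespace (here "generic"):
--~ local w = caches.getwritablepath("fonts","otf")
--~ -- e.g. <first writable cache root>/luatex-cache/generic/fonts/otf (created if needed)
--~ local r = caches.getreadablepaths("fonts","otf")
--~ -- a table with one such path per readable cache root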
local function makefullname(path,name)
if path and path ~= "" then
- name = "temp-" and name -- clash prevention
+ name = "temp-" .. name -- clash prevention
return file.addsuffix(file.join(path,name),"lua")
end
end
@@ -2111,17 +2345,21 @@ function caches.iswritable(path,name)
return fullname and file.iswritable(fullname)
end
-function caches.loaddata(path,name)
- local fullname = makefullname(path,name)
- if fullname then
- local data = loadfile(fullname)
- return data and data()
+function caches.loaddata(paths,name)
+ for i=1,#paths do
+ local fullname = makefullname(paths[i],name)
+ if fullname then
+ texio.write(string.format("(load: %s)",fullname))
+ local data = loadfile(fullname)
+ return data and data()
+ end
end
end
function caches.savedata(path,name,data)
local fullname = makefullname(path,name)
if fullname then
+ texio.write(string.format("(save: %s)",fullname))
table.tofile(fullname,data,'return',false,true,false)
end
end
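-- Illustrative sketch (not part of the patch): savedata takes one writable
-- path while loaddata takes a list of readable paths; both go through
-- makefullname, so the file on disk is "temp-<name>.lua" holding a returned table.
--~ local wpath = caches.getwritablepath("demo","test") -- hypothetical category/subcategory
--~ caches.savedata(wpath, "sample", { answer = 42 })   -- writes .../temp-sample.lua
--~ local data = caches.loaddata({ wpath }, "sample")   -- data.answer == 42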
@@ -2131,7 +2369,7 @@ end -- closure
do -- begin closure to overcome local limits and interference
if not modules then modules = { } end modules ['data-con'] = {
- version = 1.001,
+ version = 1.100,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
@@ -2161,46 +2399,58 @@ containers = containers or { }
containers.usecache = true
+local report_cache = logs.new("cache")
+
local function report(container,tag,name)
if trace_cache or trace_containers then
- logs.report(format("%s cache",container.subcategory),"%s: %s",tag,name or 'invalid')
+ report_cache("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
end
end
local allocated = { }
--- tracing
+local mt = {
+ __index = function(t,k)
+ if k == "writable" then
+ local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable = writable
+ return writable
+ elseif k == "readables" then
+ local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables = readables
+ return readables
+ end
+ end
+}
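-- Illustrative sketch (not part of the patch): the metatable defers the
-- (directory creating) path lookups until a container's writable or readables
-- field is first accessed, after which the value is stored on the table itself.
--~ local c = setmetatable({ category = "fonts", subcategory = "otf" }, mt)
--~ local w = c.writable   -- resolved via caches.getwritablepath, then cached on c
--~ local r = c.readables  -- resolved via caches.getreadablepaths, then cached on c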
function containers.define(category, subcategory, version, enabled)
- return function()
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or 1.000,
- trace = false,
- path = caches and caches.setpath and caches.setpath(category,subcategory),
- }
- c[subcategory] = s
- end
- return s
- else
- return nil
+ if category and subcategory then
+ local c = allocated[category]
+ if not c then
+ c = { }
+ allocated[category] = c
+ end
+ local s = c[subcategory]
+ if not s then
+ s = {
+ category = category,
+ subcategory = subcategory,
+ storage = { },
+ enabled = enabled,
+ version = version or math.pi, -- after all, this is TeX
+ trace = false,
+ -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
+ -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
+ }
+ setmetatable(s,mt)
+ c[subcategory] = s
end
+ return s
end
end
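-- Illustrative sketch (not part of the patch): define now hands back the
-- container table itself instead of a constructor function, which is why the
-- otf code further down uses containers.read(otf.cache,hash) rather than
-- otf.cache(); here hash and freshdata stand in for whatever the caller builds.
--~ otf.cache = containers.define("fonts", "otf", otf.version, true)
--~ local data = containers.read(otf.cache, hash)           -- nil on a miss or version mismatch
--~ if not data then
--~     data = containers.write(otf.cache, hash, freshdata) -- stores and returns the data
--~ end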
function containers.is_usable(container, name)
- return container.enabled and caches and caches.iswritable(container.path, name)
+ return container.enabled and caches and caches.iswritable(container.writable, name)
end
function containers.is_valid(container, name)
@@ -2213,18 +2463,20 @@ function containers.is_valid(container, name)
end
function containers.read(container,name)
- if container.enabled and caches and not container.storage[name] and containers.usecache then
- container.storage[name] = caches.loaddata(container.path,name)
- if containers.is_valid(container,name) then
+ local storage = container.storage
+ local stored = storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored = caches.loaddata(container.readables,name)
+ if stored and stored.cache_version == container.version then
report(container,"loaded",name)
else
- container.storage[name] = nil
+ stored = nil
end
- end
- if container.storage[name] then
+ storage[name] = stored
+ elseif stored then
report(container,"reusing",name)
end
- return container.storage[name]
+ return stored
end
function containers.write(container, name, data)
@@ -2233,7 +2485,7 @@ function containers.write(container, name, data)
if container.enabled and caches then
local unique, shared = data.unique, data.shared
data.unique, data.shared = nil, nil
- caches.savedata(container.path, name, data)
+ caches.savedata(container.writable, name, data)
report(container,"saved",name)
data.unique, data.shared = unique, shared
end
@@ -2255,122 +2507,94 @@ end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['node-ini'] = {
+if not modules then modules = { } end modules ['node-dum'] = {
version = 1.001,
- comment = "companion to node-ini.mkiv",
+ comment = "companion to luatex-*.tex",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
---[[ldx--
-<p>Most of the code that had accumulated here is now separated in
-modules.</p>
---ldx]]--
+nodes = nodes or { }
+fonts = fonts or { }
+attributes = attributes or { }
--- this module is being reconstructed
+local traverse_id = node.traverse_id
+local free_node = node.free
+local remove_node = node.remove
+local new_node = node.new
-local utf = unicode.utf8
-local next, type = next, type
-local format, concat, match, utfchar = string.format, table.concat, string.match, utf.char
+local glyph = node.id('glyph')
-local chardata = characters and characters.data
+-- fonts
---[[ldx--
-<p>We start with a registration system for atributes so that we can use the
-symbolic names later on.</p>
---ldx]]--
+local fontdata = fonts.ids or { }
-attributes = attributes or { }
+function nodes.simple_font_handler(head)
+-- lang.hyphenate(head)
+ head = nodes.process_characters(head)
+ nodes.inject_kerns(head)
+ nodes.protect_glyphs(head)
+ head = node.ligaturing(head)
+ head = node.kerning(head)
+ return head
+end
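-- Illustrative sketch (not part of the patch): in a plain LuaTeX setup this
-- handler would be hooked into the node list callbacks by the generic loader;
-- the registration itself does not live in this file.
--~ callback.register("pre_linebreak_filter", nodes.simple_font_handler)
--~ callback.register("hpack_filter",         nodes.simple_font_handler)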
-attributes.names = attributes.names or { }
-attributes.numbers = attributes.numbers or { }
-attributes.list = attributes.list or { }
-attributes.unsetvalue = -0x7FFFFFFF
+if tex.attribute[0] ~= 0 then
-storage.register("attributes/names", attributes.names, "attributes.names")
-storage.register("attributes/numbers", attributes.numbers, "attributes.numbers")
-storage.register("attributes/list", attributes.list, "attributes.list")
+ texio.write_nl("log","!")
+ texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
+ texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
+    texio.write_nl("log","! purposes, so setting them at the TeX end might break the font handler.")
+ texio.write_nl("log","!")
-local names, numbers, list = attributes.names, attributes.numbers, attributes.list
+ tex.attribute[0] = 0 -- else no features
-function attributes.define(name,number) -- at the tex end
- if not numbers[name] then
- numbers[name], names[number], list[number] = number, name, { }
- end
end
---[[ldx--
-<p>We can use the attributes in the range 127-255 (outside user space). These
-are only used when no attribute is set at the \TEX\ end which normally
-happens in <l n='context'/>.</p>
---ldx]]--
-
-storage.shared.attributes_last_private = storage.shared.attributes_last_private or 127
+nodes.protect_glyphs = node.protect_glyphs
+nodes.unprotect_glyphs = node.unprotect_glyphs
-function attributes.private(name) -- at the lua end (hidden from user)
- local number = numbers[name]
- if not number then
- local last = storage.shared.attributes_last_private or 127
- if last < 255 then
- last = last + 1
- storage.shared.attributes_last_private = last
+function nodes.process_characters(head)
+ local usedfonts, done, prevfont = { }, false, nil
+ for n in traverse_id(glyph,head) do
+ local font = n.font
+ if font ~= prevfont then
+ prevfont = font
+ local used = usedfonts[font]
+ if not used then
+ local tfmdata = fontdata[font]
+ if tfmdata then
+ local shared = tfmdata.shared -- we need to check shared, only when same features
+ if shared then
+ local processors = shared.processes
+ if processors and #processors > 0 then
+ usedfonts[font] = processors
+ done = true
+ end
+ end
+ end
+ end
end
- number = last
- numbers[name], names[number], list[number] = number, name, { }
end
- return number
+ if done then
+ for font, processors in next, usedfonts do
+ for i=1,#processors do
+ local h, d = processors[i](head,font,0)
+ head, done = h or head, done or d
+ end
+ end
+ end
+ return head, true
end
---[[ldx--
-<p>Access to nodes is what gives <l n='luatex'/> its power. Here we
-implement a few helper functions. These functions are rather optimized.</p>
---ldx]]--
+-- helper
---[[ldx--
-<p>When manipulating node lists in <l n='context'/>, we will remove
-nodes and insert new ones. While node access was implemented, we did
-quite some experiments in order to find out if manipulating nodes
-in <l n='lua'/> was feasible from the perspective of performance.</p>
-
-<p>First of all, we noticed that the bottleneck is more with excessive
-callbacks (some gets called very often) and the conversion from and to
-<l n='tex'/>'s datastructures. However, at the <l n='lua'/> end, we
-found that inserting and deleting nodes in a table could become a
-bottleneck.</p>
-
-<p>This resulted in two special situations in passing nodes back to
-<l n='tex'/>: a table entry with value <type>false</type> is ignored,
-and when instead of a table <type>true</type> is returned, the
-original table is used.</p>
-
-<p>Insertion is handled (at least in <l n='context'/> as follows. When
-we need to insert a node at a certain position, we change the node at
-that position by a dummy node, tagged <type>inline</type> which itself
-has_attribute the original node and one or more new nodes. Before we pass
-back the list we collapse the list. Of course collapsing could be built
-into the <l n='tex'/> engine, but this is a not so natural extension.</p>
-
-<p>When we collapse (something that we only do when really needed), we
-also ignore the empty nodes. [This is obsolete!]</p>
---ldx]]--
-
-nodes = nodes or { }
-
-local hlist = node.id('hlist')
-local vlist = node.id('vlist')
-local glyph = node.id('glyph')
-local glue = node.id('glue')
-local penalty = node.id('penalty')
-local kern = node.id('kern')
-local whatsit = node.id('whatsit')
-
-local traverse_id = node.traverse_id
-local traverse = node.traverse
-local free_node = node.free
-local remove_node = node.remove
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
+function nodes.kern(k)
+ local n = new_node("kern",1)
+ n.kern = k
+ return n
+end
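-- Illustrative sketch (not part of the patch): new_node("kern",1) makes a user
-- kern; the amount is given in scaled points.
--~ local k = nodes.kern(65536) -- a 1pt kern (65536sp)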
function nodes.remove(head, current, free_too)
local t = current
@@ -2390,423 +2614,27 @@ function nodes.delete(head,current)
return nodes.remove(head,current,true)
end
-nodes.before = insert_node_before
-nodes.after = insert_node_after
-
--- we need to test this, as it might be fixed now
-
-function nodes.before(h,c,n)
- if c then
- if c == h then
- n.next = h
- n.prev = nil
- h.prev = n
- else
- local cp = c.prev
- n.next = c
- n.prev = cp
- if cp then
- cp.next = n
- end
- c.prev = n
- return h, n
- end
- end
- return n, n
-end
-
-function nodes.after(h,c,n)
- if c then
- local cn = c.next
- if cn then
- n.next = cn
- cn.prev = n
- else
- n.next = nil
- end
- c.next = n
- n.prev = c
- return h, n
- end
- return n, n
-end
-
--- local h, c = nodes.replace(head,current,new)
--- local c = nodes.replace(false,current,new)
--- local c = nodes.replace(current,new)
-
-function nodes.replace(head,current,new) -- no head returned if false
- if not new then
- head, current, new = false, head, current
- end
- local prev, next = current.prev, current.next
- if next then
- new.next, next.prev = next, new
- end
- if prev then
- new.prev, prev.next = prev, new
- end
- if head then
- if head == current then
- head = new
- end
- free_node(current)
- return head, new
- else
- free_node(current)
- return new
- end
-end
-
--- will move
-
-local function count(stack,flat)
- local n = 0
- while stack do
- local id = stack.id
- if not flat and id == hlist or id == vlist then
- local list = stack.list
- if list then
- n = n + 1 + count(list) -- self counts too
- else
- n = n + 1
- end
- else
- n = n + 1
- end
- stack = stack.next
- end
- return n
-end
-
-nodes.count = count
-
--- new, will move
-
-function attributes.ofnode(n)
- local a = n.attr
- if a then
- local names = attributes.names
- a = a.next
- while a do
- local number, value = a.number, a.value
- texio.write_nl(format("%s : attribute %3i, value %4i, name %s",tostring(n),number,value,names[number] or '?'))
- a = a.next
- end
- end
-end
-
-local left, space = lpeg.P("<"), lpeg.P(" ")
-
-nodes.filterkey = left * (1-left)^0 * left * space^0 * lpeg.C((1-space)^0)
+nodes.before = node.insert_before
+nodes.after = node.insert_after
-end -- closure
-
-do -- begin closure to overcome local limits and interference
-
-if not modules then modules = { } end modules ['node-res'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local gmatch, format = string.gmatch, string.format
-local copy_node, free_node, free_list, new_node, node_type, node_id = node.copy, node.free, node.flush_list, node.new, node.type, node.id
-local tonumber, round = tonumber, math.round
-
-local glyph_node = node_id("glyph")
-
---[[ldx--
-<p>The next function is not that much needed but in <l n='context'/> we use
-for debugging <l n='luatex'/> node management.</p>
---ldx]]--
-
-nodes = nodes or { }
-
-nodes.whatsits = { } -- table.swapped(node.whatsits())
-
-local reserved = { }
-local whatsits = nodes.whatsits
-
-for k, v in next, node.whatsits() do
- whatsits[k], whatsits[v] = v, k -- two way
-end
-
-local function register_node(n)
- reserved[#reserved+1] = n
- return n
-end
-
-nodes.register = register_node
-
-function nodes.cleanup_reserved(nofboxes) -- todo
- nodes.tracers.steppers.reset() -- todo: make a registration subsystem
- local nr, nl = #reserved, 0
- for i=1,nr do
- local ri = reserved[i]
- -- if not (ri.id == glue_spec and not ri.is_writable) then
- free_node(reserved[i])
- -- end
- end
- if nofboxes then
- local tb = tex.box
- for i=0,nofboxes do
- local l = tb[i]
- if l then
- free_node(tb[i])
- nl = nl + 1
- end
- end
- end
- reserved = { }
- return nr, nl, nofboxes -- can be nil
-end
-
-function nodes.usage()
- local t = { }
- for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
- t[tag] = n
- end
- return t
-end
-
-local disc = register_node(new_node("disc"))
-local kern = register_node(new_node("kern",1))
-local penalty = register_node(new_node("penalty"))
-local glue = register_node(new_node("glue")) -- glue.spec = nil
-local glue_spec = register_node(new_node("glue_spec"))
-local glyph = register_node(new_node("glyph",0))
-local textdir = register_node(new_node("whatsit",whatsits.dir)) -- 7 (6 is local par node)
-local rule = register_node(new_node("rule"))
-local latelua = register_node(new_node("whatsit",whatsits.late_lua)) -- 35
-local user_n = register_node(new_node("whatsit",whatsits.user_defined)) user_n.type = 100 -- 44
-local user_l = register_node(new_node("whatsit",whatsits.user_defined)) user_l.type = 110 -- 44
-local user_s = register_node(new_node("whatsit",whatsits.user_defined)) user_s.type = 115 -- 44
-local user_t = register_node(new_node("whatsit",whatsits.user_defined)) user_t.type = 116 -- 44
-local left_margin_kern = register_node(new_node("margin_kern",0))
-local right_margin_kern = register_node(new_node("margin_kern",1))
-local lineskip = register_node(new_node("glue",1))
-local baselineskip = register_node(new_node("glue",2))
-local leftskip = register_node(new_node("glue",8))
-local rightskip = register_node(new_node("glue",9))
-local temp = register_node(new_node("temp",0))
-
-function nodes.zeroglue(n)
- local s = n.spec
- return not writable or (
- s.width == 0
- and s.stretch == 0
- and s.shrink == 0
- and s.stretch_order == 0
- and s.shrink_order == 0
- )
-end
-
-function nodes.glyph(fnt,chr)
- local n = copy_node(glyph)
- if fnt then n.font = fnt end
- if chr then n.char = chr end
- return n
-end
-
-function nodes.penalty(p)
- local n = copy_node(penalty)
- n.penalty = p
- return n
-end
-
-function nodes.kern(k)
- local n = copy_node(kern)
- n.kern = k
- return n
-end
-
-function nodes.glue_spec(width,stretch,shrink)
- local s = copy_node(glue_spec)
- s.width, s.stretch, s.shrink = width, stretch, shrink
- return s
-end
-
-local function someskip(skip,width,stretch,shrink)
- local n = copy_node(skip)
- if not width then
- -- no spec
- elseif tonumber(width) then
- local s = copy_node(glue_spec)
- s.width, s.stretch, s.shrink = width, stretch, shrink
- n.spec = s
- else
- -- shared
- n.spec = copy_node(width)
- end
- return n
-end
-
-function nodes.glue(width,stretch,shrink)
- return someskip(glue,width,stretch,shrink)
-end
-function nodes.leftskip(width,stretch,shrink)
- return someskip(leftskip,width,stretch,shrink)
-end
-function nodes.rightskip(width,stretch,shrink)
- return someskip(rightskip,width,stretch,shrink)
-end
-function nodes.lineskip(width,stretch,shrink)
- return someskip(lineskip,width,stretch,shrink)
-end
-function nodes.baselineskip(width,stretch,shrink)
- return someskip(baselineskip,width,stretch,shrink)
-end
-
-function nodes.disc()
- return copy_node(disc)
-end
-
-function nodes.textdir(dir)
- local t = copy_node(textdir)
- t.dir = dir
- return t
-end
-
-function nodes.rule(width,height,depth,dir)
- local n = copy_node(rule)
- if width then n.width = width end
- if height then n.height = height end
- if depth then n.depth = depth end
- if dir then n.dir = dir end
- return n
-end
-
-function nodes.latelua(code)
- local n = copy_node(latelua)
- n.data = code
- return n
-end
-
-function nodes.leftmarginkern(glyph,width)
- local n = copy_node(left_margin_kern)
- if not glyph then
- logs.fatal("nodes","invalid pointer to left margin glyph node")
- elseif glyph.id ~= glyph_node then
- logs.fatal("nodes","invalid node type %s for left margin glyph node",node_type(glyph))
- else
- n.glyph = glyph
- end
- if width then
- n.width = width
- end
- return n
-end
-
-function nodes.rightmarginkern(glyph,width)
- local n = copy_node(right_margin_kern)
- if not glyph then
- logs.fatal("nodes","invalid pointer to right margin glyph node")
- elseif glyph.id ~= glyph_node then
- logs.fatal("nodes","invalid node type %s for right margin glyph node",node_type(p))
- else
- n.glyph = glyph
- end
- if width then
- n.width = width
- end
- return n
-end
+-- attributes
-function nodes.temp()
- return copy_node(temp)
-end
---[[
-<p>At some point we ran into a problem that the glue specification
-of the zeropoint dimension was overwritten when adapting a glue spec
-node. This is a side effect of glue specs being shared. After a
-couple of hours tracing and debugging Taco and I came to the
-conclusion that it made no sense to complicate the spec allocator
-and settled on a writable flag. This all is a side effect of the
-fact that some glues use reserved memory slots (with the zeropoint
-glue being a noticeable one). So, next we wrap this into a function
-and hide it for the user. And yes, LuaTeX now gives a warning as
-well.</p>
-]]--
-
-if tex.luatexversion > 51 then
+attributes.unsetvalue = -0x7FFFFFFF
- function nodes.writable_spec(n)
- local spec = n.spec
- if not spec then
- spec = copy_node(glue_spec)
- n.spec = spec
- elseif not spec.writable then
- spec = copy_node(spec)
- n.spec = spec
- end
- return spec
- end
+local numbers, last = { }, 127
-else
-
- function nodes.writable_spec(n)
- local spec = n.spec
- if not spec then
- spec = copy_node(glue_spec)
- else
- spec = copy_node(spec)
+function attributes.private(name)
+ local number = numbers[name]
+ if not number then
+ if last < 255 then
+ last = last + 1
end
- n.spec = spec
- return spec
- end
-
-end
-
-local cache = { }
-
-function nodes.usernumber(num)
- local n = cache[num]
- if n then
- return copy_node(n)
- else
- local n = copy_node(user_n)
- if num then n.value = num end
- return n
- end
-end
-
-function nodes.userlist(list)
- local n = copy_node(user_l)
- if list then n.value = list end
- return n
-end
-
-local cache = { } -- we could use the same cache
-
-function nodes.userstring(str)
- local n = cache[str]
- if n then
- return copy_node(n)
- else
- local n = copy_node(user_s)
- n.type = 115
- if str then n.value = str end
- return n
+ number = last
+ numbers[name] = number
end
+ return number
end
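-- Illustrative sketch (not part of the patch): this generic variant simply
-- hands out attribute numbers from 128 upwards (here n stands for any node).
--~ local state = attributes.private("state")  -- 128 on the first call
--~ node.set_attribute(n, state, 1)            -- tag node n with it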
-function nodes.usertokens(tokens)
- local n = copy_node(user_t)
- if tokens then n.value = tokens end
- return n
-end
-
-statistics.register("cleaned up reserved nodes", function()
- return format("%s nodes, %s lists of %s", nodes.cleanup_reserved(tex.count["lastallocatedbox"]))
-end) -- \topofboxstack
-
-statistics.register("node memory usage", function() -- comes after cleanup !
- return status.node_mem_usage
-end)
-
end -- closure
do -- begin closure to overcome local limits and interference
@@ -2830,6 +2658,8 @@ local next = next
local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
+local report_injections = logs.new("injections")
+
fonts = fonts or { }
fonts.tfm = fonts.tfm or { }
fonts.ids = fonts.ids or { }
@@ -2840,6 +2670,7 @@ local glyph = node.id('glyph')
local kern = node.id('kern')
local traverse_id = node.traverse_id
+local unset_attribute = node.unset_attribute
local has_attribute = node.has_attribute
local set_attribute = node.set_attribute
local insert_node_before = node.insert_before
@@ -2901,8 +2732,10 @@ function nodes.set_kern(current,factor,rlmode,x,tfmchr)
local bound = #kerns + 1
set_attribute(current,kernpair,bound)
kerns[bound] = { rlmode, dx }
+ return dx, bound
+ else
+ return 0, 0
end
- return dx, bound
end
function nodes.set_mark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, ma=markanchor
@@ -2917,7 +2750,7 @@ function nodes.set_mark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, m
set_attribute(start,markdone,index)
return dx, dy, bound
else
- logs.report("nodes mark", "possible problem, U+%04X is base without data (id: %s)",base.char,bound)
+ report_injections("possible problem, U+%04X is base mark without data (id: %s)",base.char,bound)
end
end
index = index or 1
@@ -2933,10 +2766,7 @@ function nodes.trace_injection(head)
local function dir(n)
return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or ("unset")
end
- local function report(...)
- logs.report("nodes finisher",...)
- end
- report("begin run")
+ report_injections("begin run")
for n in traverse_id(glyph,head) do
if n.subtype < 256 then
local kp = has_attribute(n,kernpair)
@@ -2945,45 +2775,46 @@ function nodes.trace_injection(head)
local md = has_attribute(n,markdone)
local cb = has_attribute(n,cursbase)
local cc = has_attribute(n,curscurs)
- report("char U+%05X, font=%s",n.char,n.font)
+ report_injections("char U+%05X, font=%s",n.char,n.font)
if kp then
local k = kerns[kp]
if k[3] then
- report(" pairkern: dir=%s, x=%s, y=%s, w=%s, h=%s",dir(k[1]),k[2] or "?",k[3] or "?",k[4] or "?",k[5] or "?")
+ report_injections(" pairkern: dir=%s, x=%s, y=%s, w=%s, h=%s",dir(k[1]),k[2] or "?",k[3] or "?",k[4] or "?",k[5] or "?")
else
- report(" kern: dir=%s, dx=%s",dir(k[1]),k[2] or "?")
+ report_injections(" kern: dir=%s, dx=%s",dir(k[1]),k[2] or "?")
end
end
if mb then
- report(" markbase: bound=%s",mb)
+ report_injections(" markbase: bound=%s",mb)
end
if mm then
local m = marks[mm]
if mb then
local m = m[mb]
if m then
- report(" markmark: bound=%s, index=%s, dx=%s, dy=%s",mm,md or "?",m[1] or "?",m[2] or "?")
+ report_injections(" markmark: bound=%s, index=%s, dx=%s, dy=%s",mm,md or "?",m[1] or "?",m[2] or "?")
else
- report(" markmark: bound=%s, missing index",mm)
+ report_injections(" markmark: bound=%s, missing index",mm)
end
else
m = m[1]
- report(" markmark: bound=%s, dx=%s, dy=%s",mm,m[1] or "?",m[2] or "?")
+ report_injections(" markmark: bound=%s, dx=%s, dy=%s",mm,m[1] or "?",m[2] or "?")
end
end
if cb then
- report(" cursbase: bound=%s",cb)
+ report_injections(" cursbase: bound=%s",cb)
end
if cc then
local c = cursives[cc]
- report(" curscurs: bound=%s, dir=%s, dx=%s, dy=%s",cc,dir(c[1]),c[2] or "?",c[3] or "?")
+ report_injections(" curscurs: bound=%s, dir=%s, dx=%s, dy=%s",cc,dir(c[1]),c[2] or "?",c[3] or "?")
end
end
end
- report("end run")
+ report_injections("end run")
end
-- todo: reuse tables (i.e. no collection), but will be extra fields anyway
+-- todo: check for attribute
function nodes.inject_kerns(head,where,keep)
local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
@@ -3006,6 +2837,7 @@ function nodes.inject_kerns(head,where,keep)
mk[n] = tm[n.char]
local k = has_attribute(n,kernpair)
if k then
+--~ unset_attribute(k,kernpair)
local kk = kerns[k]
if kk then
local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
@@ -3155,39 +2987,21 @@ function nodes.inject_kerns(head,where,keep)
-- only w can be nil, can be sped up when w == nil
local rl, x, w, r2l = k[1], k[2] or 0, k[4] or 0, k[6]
local wx = w - x
---~ if rl < 0 then
---~ if r2l then
---~ if wx ~= 0 then
---~ insert_node_before(head,n,newkern(wx))
---~ end
---~ if x ~= 0 then
---~ insert_node_after (head,n,newkern(x))
---~ end
---~ else
---~ if x ~= 0 then
---~ insert_node_before(head,n,newkern(x))
---~ end
---~ if wx ~= 0 then
---~ insert_node_after(head,n,newkern(wx))
---~ end
---~ end
---~ else
- if r2l then
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
+ if r2l then
+ if wx ~= 0 then
+ insert_node_before(head,n,newkern(wx))
end
---~ end
+ if x ~= 0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx ~= 0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
end
end
if next(cx) then
@@ -3214,35 +3028,19 @@ function nodes.inject_kerns(head,where,keep)
nodes.trace_injection(head)
end
for n in traverse_id(glyph,head) do
- local k = has_attribute(n,kernpair)
- if k then
- local kk = kerns[k]
- if kk then
- local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
- if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
- end
- if w then
- -- copied from above
- local r2l = kk[6]
- local wx = w - x
---~ if rl < 0 then
---~ if r2l then
---~ if x ~= 0 then
---~ insert_node_before(head,n,newkern(x))
---~ end
---~ if wx ~= 0 then
---~ insert_node_after(head,n,newkern(wx))
---~ end
---~ else
---~ if wx ~= 0 then
---~ insert_node_before(head,n,newkern(wx))
---~ end
---~ if x ~= 0 then
---~ insert_node_after (head,n,newkern(x))
---~ end
---~ end
---~ else
+ if n.subtype < 256 then
+ local k = has_attribute(n,kernpair)
+ if k then
+ local kk = kerns[k]
+ if kk then
+ local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
+ if y and y ~= 0 then
+ n.yoffset = y -- todo: h ?
+ end
+ if w then
+ -- copied from above
+ local r2l = kk[6]
+ local wx = w - x
if r2l then
if wx ~= 0 then
insert_node_before(head,n,newkern(wx))
@@ -3258,11 +3056,11 @@ function nodes.inject_kerns(head,where,keep)
insert_node_after(head,n,newkern(wx))
end
end
---~ end
- else
- -- simple (e.g. kernclass kerns)
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
+ else
+ -- simple (e.g. kernclass kerns)
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
end
end
end
@@ -3282,242 +3080,6 @@ end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['node-fnt'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local next, type = next, type
-
-local trace_characters = false trackers.register("nodes.characters", function(v) trace_characters = v end)
-
-local glyph = node.id('glyph')
-
-local traverse_id = node.traverse_id
-local has_attribute = node.has_attribute
-
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
-
-fonts = fonts or { }
-fonts.tfm = fonts.tfm or { }
-fonts.ids = fonts.ids or { }
-
-local fontdata = fonts.ids
-
--- some tests with using an array of dynamics[id] and processes[id] demonstrated
--- that there was nothing to gain (unless we also optimize other parts)
---
--- maybe getting rid of the intermediate shared can save some time
-
--- potential speedup: check for subtype < 256 so that we can remove that test
--- elsewhere, danger: injected nodes will not be dealt with but that does not
--- happen often; we could consider processing sublists but that might need mor
--- checking later on; the current approach also permits variants
-
-if tex.attribute[0] < 0 then
-
- texio.write_nl("log","!")
- texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
- texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
- texio.write_nl("log","! purposed so setting them at the TeX end might break the font handler.")
- texio.write_nl("log","!")
-
- tex.attribute[0] = 0 -- else no features
-
-end
-
--- this will be redone and split in a generic one and a context one
-
-function nodes.process_characters(head)
- -- either next or not, but definitely no already processed list
- starttiming(nodes)
- local usedfonts, attrfonts, done = { }, { }, false
- local a, u, prevfont, prevattr = 0, 0, nil, 0
- for n in traverse_id(glyph,head) do
- local font, attr = n.font, has_attribute(n,0) -- zero attribute is reserved for fonts in context
- if attr and attr > 0 then
- if font ~= prevfont or attr ~= prevattr then
- local used = attrfonts[font]
- if not used then
- used = { }
- attrfonts[font] = used
- end
- if not used[attr] then
- -- we do some testing outside the function
- local tfmdata = fontdata[font]
- local shared = tfmdata.shared
- if shared then
- local dynamics = shared.dynamics
- if dynamics then
- local d = shared.set_dynamics(font,dynamics,attr) -- still valid?
- if d then
- used[attr] = d
- a = a + 1
- end
- end
- end
- end
- prevfont, prevattr = font, attr
- end
- elseif font ~= prevfont then
- prevfont, prevattr = font, 0
- local used = usedfonts[font]
- if not used then
- local tfmdata = fontdata[font]
- if tfmdata then
- local shared = tfmdata.shared -- we need to check shared, only when same features
- if shared then
- local processors = shared.processes
- if processors and #processors > 0 then
- usedfonts[font] = processors
- u = u + 1
- end
- end
- else
- -- probably nullfont
- end
- end
- else
- prevattr = attr
- end
- end
- -- we could combine these and just make the attribute nil
- if u == 1 then
- local font, processors = next(usedfonts)
- local n = #processors
- if n > 0 then
- local h, d = processors[1](head,font,false)
- head, done = h or head, done or d
- if n > 1 then
- for i=2,n do
- local h, d = processors[i](head,font,false)
- head, done = h or head, done or d
- end
- end
- end
- elseif u > 0 then
- for font, processors in next, usedfonts do
- local n = #processors
- local h, d = processors[1](head,font,false)
- head, done = h or head, done or d
- if n > 1 then
- for i=2,n do
- local h, d = processors[i](head,font,false)
- head, done = h or head, done or d
- end
- end
- end
- end
- if a == 1 then
- local font, dynamics = next(attrfonts)
- for attribute, processors in next, dynamics do -- attr can switch in between
- local n = #processors
- local h, d = processors[1](head,font,attribute)
- head, done = h or head, done or d
- if n > 1 then
- for i=2,n do
- local h, d = processors[i](head,font,attribute)
- head, done = h or head, done or d
- end
- end
- end
- elseif a > 0 then
- for font, dynamics in next, attrfonts do
- for attribute, processors in next, dynamics do -- attr can switch in between
- local n = #processors
- local h, d = processors[1](head,font,attribute)
- head, done = h or head, done or d
- if n > 1 then
- for i=2,n do
- local h, d = processors[i](head,font,attribute)
- head, done = h or head, done or d
- end
- end
- end
- end
- end
- stoptiming(nodes)
- if trace_characters then
- nodes.report(head,done)
- end
- return head, true
-end
-
-if node.protect_glyphs then
-
- nodes.protect_glyphs = node.protect_glyphs
- nodes.unprotect_glyphs = node.unprotect_glyphs
-
-else do
-
- -- initial value subtype : X000 0001 = 1 = 0x01 = char
- --
- -- expected before linebreak : X000 0000 = 0 = 0x00 = glyph
- -- X000 0010 = 2 = 0x02 = ligature
- -- X000 0100 = 4 = 0x04 = ghost
- -- X000 1010 = 10 = 0x0A = leftboundary lig
- -- X001 0010 = 18 = 0x12 = rightboundary lig
- -- X001 1010 = 26 = 0x1A = both boundaries lig
- -- X000 1100 = 12 = 0x1C = leftghost
- -- X001 0100 = 20 = 0x14 = rightghost
-
- function nodes.protect_glyphs(head)
- local done = false
- for g in traverse_id(glyph,head) do
- local s = g.subtype
- if s == 1 then
- done, g.subtype = true, 256
- elseif s <= 256 then
- done, g.subtype = true, 256 + s
- end
- end
- return done
- end
-
- function nodes.unprotect_glyphs(head)
- local done = false
- for g in traverse_id(glyph,head) do
- local s = g.subtype
- if s > 256 then
- done, g.subtype = true, s - 256
- end
- end
- return done
- end
-
-end end
-
-end -- closure
-
-do -- begin closure to overcome local limits and interference
-
-if not modules then modules = { } end modules ['node-dum'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-nodes = nodes or { }
-
-function nodes.simple_font_handler(head)
--- lang.hyphenate(head)
- head = nodes.process_characters(head)
- nodes.inject_kerns(head)
- nodes.protect_glyphs(head)
- head = node.ligaturing(head)
- head = node.kerning(head)
- return head
-end
-
-end -- closure
-
-do -- begin closure to overcome local limits and interference
-
if not modules then modules = { } end modules ['font-ini'] = {
version = 1.001,
comment = "companion to font-ini.mkiv",
@@ -3533,6 +3095,9 @@ if not modules then modules = { } end modules ['font-ini'] = {
local utf = unicode.utf8
local format, serialize = string.format, table.serialize
local write_nl = texio.write_nl
+local lower = string.lower
+
+local report_define = logs.new("define fonts")
if not fontloader then fontloader = fontforge end
@@ -3604,12 +3169,12 @@ end
fonts.formats = { }
function fonts.fontformat(filename,default)
- local extname = file.extname(filename)
+ local extname = lower(file.extname(filename))
local format = fonts.formats[extname]
if format then
return format
else
- logs.report("fonts define","unable to detemine font format for '%s'",filename)
+ report_define("unable to determine font format for '%s'",filename)
return default
end
end
@@ -3634,6 +3199,8 @@ local concat, sortedkeys, utfbyte, serialize = table.concat, table.sortedkeys, u
local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end)
+local report_define = logs.new("define fonts")
+
-- tfmdata has also fast access to indices and unicodes
-- to be checked: otf -> tfm -> tfmscaled
--
@@ -3672,11 +3239,13 @@ tfm.fontname_mode = "fullpath"
tfm.enhance = tfm.enhance or function() end
+fonts.formats.tfm = "type1" -- we need to have at least a value here
+
function tfm.read_from_tfm(specification)
local fname, tfmdata = specification.filename or "", nil
if fname ~= "" then
if trace_defining then
- logs.report("define font","loading tfm file %s at size %s",fname,specification.size)
+ report_define("loading tfm file %s at size %s",fname,specification.size)
end
tfmdata = font.read_tfm(fname,specification.size) -- not cached, fast enough
if tfmdata then
@@ -3699,7 +3268,7 @@ function tfm.read_from_tfm(specification)
tfm.enhance(tfmdata,specification)
end
elseif trace_defining then
- logs.report("define font","loading tfm with name %s fails",specification.name)
+ report_define("loading tfm with name %s fails",specification.name)
end
return tfmdata
end
@@ -3867,12 +3436,12 @@ function tfm.do_scale(tfmtable, scaledpoints, relativeid)
local nodemode = tfmtable.mode == "node"
local hasquality = tfmtable.auto_expand or tfmtable.auto_protrude
local hasitalic = tfmtable.has_italic
+ local descriptions = tfmtable.descriptions or { }
--
t.parameters = { }
t.characters = { }
t.MathConstants = { }
-- fast access
- local descriptions = tfmtable.descriptions or { }
t.unicodes = tfmtable.unicodes
t.indices = tfmtable.indices
t.marks = tfmtable.marks
@@ -3975,7 +3544,7 @@ t.colorscheme = tfmtable.colorscheme
end
end
-- if trace_scaling then
- -- logs.report("define font","t=%s, u=%s, i=%s, n=%s c=%s",k,chr.tounicode or k,description.index,description.name or '-',description.class or '-')
+ -- report_define("t=%s, u=%s, i=%s, n=%s c=%s",k,chr.tounicode or k,description.index,description.name or '-',description.class or '-')
-- end
if tounicode then
local tu = tounicode[index] -- nb: index!
@@ -4011,6 +3580,9 @@ t.colorscheme = tfmtable.colorscheme
local vn = v.next
if vn then
chr.next = vn
+ --~ if v.vert_variants or v.horiz_variants then
+ --~ report_define("glyph 0x%05X has combination of next, vert_variants and horiz_variants",index)
+ --~ end
else
local vv = v.vert_variants
if vv then
@@ -4180,11 +3752,11 @@ t.colorscheme = tfmtable.colorscheme
-- can have multiple subfonts
if hasmath then
if trace_defining then
- logs.report("define font","math enabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
+ report_define("math enabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
end
else
if trace_defining then
- logs.report("define font","math disabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
+ report_define("math disabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
end
t.nomath, t.MathConstants = true, nil
end
@@ -4193,8 +3765,8 @@ t.colorscheme = tfmtable.colorscheme
t.psname = t.fontname or (t.fullname and fonts.names.cleanname(t.fullname))
end
if trace_defining then
- logs.report("define font","used for accesing subfont: '%s'",t.psname or "nopsname")
- logs.report("define font","used for subsetting: '%s'",t.fontname or "nofontname")
+        report_define("used for accessing subfont: '%s'",t.psname or "nopsname")
+ report_define("used for subsetting: '%s'",t.fontname or "nofontname")
end
--~ print(t.fontname,table.serialize(t.MathConstants))
return t, delta
@@ -4333,18 +3905,18 @@ function tfm.checked_filename(metadata,whatever)
if askedfilename ~= "" then
foundfilename = resolvers.findbinfile(askedfilename,"") or ""
if foundfilename == "" then
- logs.report("fonts","source file '%s' is not found",askedfilename)
+ report_define("source file '%s' is not found",askedfilename)
foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or ""
if foundfilename ~= "" then
- logs.report("fonts","using source file '%s' (cache mismatch)",foundfilename)
+ report_define("using source file '%s' (cache mismatch)",foundfilename)
end
end
elseif whatever then
- logs.report("fonts","no source file for '%s'",whatever)
+ report_define("no source file for '%s'",whatever)
foundfilename = ""
end
metadata.foundfilename = foundfilename
- -- logs.report("fonts","using source file '%s'",foundfilename)
+ -- report_define("using source file '%s'",foundfilename)
end
return foundfilename
end
@@ -4373,6 +3945,8 @@ local lpegmatch = lpeg.match
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local report_otf = logs.new("load otf")
+
fonts = fonts or { }
fonts.cid = fonts.cid or { }
fonts.cid.map = fonts.cid.map or { }
@@ -4447,14 +4021,14 @@ local function locate(registry,ordering,supplement)
local cidmap = fonts.cid.map[hashname]
if not cidmap then
if trace_loading then
- logs.report("load otf","checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
+ report_otf("checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
end
local fullname = resolvers.find_file(filename,'cid') or ""
if fullname ~= "" then
cidmap = fonts.cid.load(fullname)
if cidmap then
if trace_loading then
- logs.report("load otf","using cidmap file %s",filename)
+ report_otf("using cidmap file %s",filename)
end
fonts.cid.map[hashname] = cidmap
cidmap.usedname = file.basename(filename)
@@ -4469,7 +4043,7 @@ function fonts.cid.getmap(registry,ordering,supplement)
-- cf Arthur R. we can safely scan upwards since cids are downward compatible
local supplement = tonumber(supplement)
if trace_loading then
- logs.report("load otf","needed cidmap, registry: %s, ordering: %s, supplement: %s",registry,ordering,supplement)
+ report_otf("needed cidmap, registry: %s, ordering: %s, supplement: %s",registry,ordering,supplement)
end
local cidmap = locate(registry,ordering,supplement)
if not cidmap then
@@ -5209,7 +4783,6 @@ function otf.meanings.normalize(features)
k = lower(k)
if k == "language" or k == "lang" then
v = gsub(lower(v),"[^a-z0-9%-]","")
- k = language
if not languages[v] then
h.language = to_languages[v] or "dflt"
else
@@ -5488,6 +5061,8 @@ local utfbyte = utf.byte
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
local trace_unimapping = false trackers.register("otf.unimapping", function(v) trace_unimapping = v end)
+local report_otf = logs.new("load otf")
+
local ctxcatcodes = tex and tex.ctxcatcodes
--[[ldx--
@@ -5504,7 +5079,7 @@ local function load_lum_table(filename) -- will move to font goodies
local lumfile = resolvers.find_file(lumname,"map") or ""
if lumfile ~= "" and lfs.isfile(lumfile) then
if trace_loading or trace_unimapping then
- logs.report("load otf","enhance: loading %s ",lumfile)
+ report_otf("enhance: loading %s ",lumfile)
end
lumunic = dofile(lumfile)
return lumunic, lumfile
@@ -5729,14 +5304,14 @@ fonts.map.add_to_unicode = function(data,filename)
for index, glyph in table.sortedhash(data.glyphs) do
local toun, name, unic = tounicode[index], glyph.name, glyph.unicode or -1 -- play safe
if toun then
- logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X, tounicode: %s",index,name,unic,toun)
+ report_otf("internal: 0x%05X, name: %s, unicode: 0x%05X, tounicode: %s",index,name,unic,toun)
else
- logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X",index,name,unic)
+ report_otf("internal: 0x%05X, name: %s, unicode: 0x%05X",index,name,unic)
end
end
end
if trace_loading and (ns > 0 or nl > 0) then
- logs.report("load otf","enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
+ report_otf("enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
end
end
@@ -5857,10 +5432,11 @@ if not modules then modules = { } end modules ['font-otf'] = {
local utf = unicode.utf8
-local concat, getn, utfbyte = table.concat, table.getn, utf.byte
+local concat, utfbyte = table.concat, utf.byte
local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
local type, next, tonumber, tostring = type, next, tonumber, tostring
local abs = math.abs
+local getn = table.getn
local lpegmatch = lpeg.match
local trace_private = false trackers.register("otf.private", function(v) trace_private = v end)
@@ -5871,6 +5447,8 @@ local trace_sequences = false trackers.register("otf.sequences", function(v
local trace_math = false trackers.register("otf.math", function(v) trace_math = v end)
local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+local report_otf = logs.new("load otf")
+
--~ trackers.enable("otf.loading")
--[[ldx--
@@ -5930,7 +5508,7 @@ otf.features.default = otf.features.default or { }
otf.enhancers = otf.enhancers or { }
otf.glists = { "gsub", "gpos" }
-otf.version = 2.650 -- beware: also sync font-mis.lua
+otf.version = 2.653 -- beware: also sync font-mis.lua
otf.pack = true -- beware: also sync font-mis.lua
otf.syncspace = true
otf.notdef = false
@@ -6031,7 +5609,7 @@ local function load_featurefile(ff,featurefile)
featurefile = resolvers.find_file(file.addsuffix(featurefile,'fea'),'fea')
if featurefile and featurefile ~= "" then
if trace_loading then
- logs.report("load otf", "featurefile: %s", featurefile)
+ report_otf("featurefile: %s", featurefile)
end
fontloader.apply_featurefile(ff, featurefile)
end
@@ -6042,7 +5620,7 @@ function otf.enhance(name,data,filename,verbose)
local enhancer = otf.enhancers[name]
if enhancer then
if (verbose ~= nil and verbose) or trace_loading then
- logs.report("load otf","enhance: %s (%s)",name,filename)
+ report_otf("enhance: %s (%s)",name,filename)
end
enhancer(data,filename)
end
@@ -6070,6 +5648,8 @@ local enhancers = {
function otf.load(filename,format,sub,featurefile)
local name = file.basename(file.removesuffix(filename))
+ local attr = lfs.attributes(filename)
+ local size, time = attr.size or 0, attr.modification or 0
if featurefile then
name = name .. "@" .. file.removesuffix(file.basename(featurefile))
end
@@ -6079,10 +5659,9 @@ function otf.load(filename,format,sub,featurefile)
hash = hash .. "-" .. sub
end
hash = containers.cleanname(hash)
- local data = containers.read(otf.cache(), hash)
- local size = lfs.attributes(filename,"size") or 0
- if not data or data.verbose ~= fonts.verbose or data.size ~= size then
- logs.report("load otf","loading: %s (hash: %s)",filename,hash)
+ local data = containers.read(otf.cache,hash)
+ if not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time then
+ report_otf("loading: %s (hash: %s)",filename,hash)
local ff, messages
if sub then
ff, messages = fontloader.open(filename,sub)
@@ -6091,22 +5670,22 @@ function otf.load(filename,format,sub,featurefile)
end
if trace_loading and messages and #messages > 0 then
if type(messages) == "string" then
- logs.report("load otf","warning: %s",messages)
+ report_otf("warning: %s",messages)
else
for m=1,#messages do
- logs.report("load otf","warning: %s",tostring(messages[m]))
+ report_otf("warning: %s",tostring(messages[m]))
end
end
else
- logs.report("load otf","font loaded okay")
+ report_otf("font loaded okay")
end
if ff then
load_featurefile(ff,featurefile)
data = fontloader.to_table(ff)
fontloader.close(ff)
if data then
- logs.report("load otf","file size: %s", size)
- logs.report("load otf","enhancing ...")
+ report_otf("file size: %s", size)
+ report_otf("enhancing ...")
for e=1,#enhancers do
otf.enhance(enhancers[e],data,filename)
io.flush() -- we want instant messages
@@ -6115,22 +5694,23 @@ function otf.load(filename,format,sub,featurefile)
otf.enhance("pack",data,filename)
end
data.size = size
+ data.time = time
data.verbose = fonts.verbose
- logs.report("load otf","saving in cache: %s",filename)
- data = containers.write(otf.cache(), hash, data)
+ report_otf("saving in cache: %s",filename)
+ data = containers.write(otf.cache, hash, data)
collectgarbage("collect")
- data = containers.read(otf.cache(), hash) -- this frees the old table and load the sparse one
+ data = containers.read(otf.cache, hash) -- this frees the old table and loads the sparse one
collectgarbage("collect")
else
- logs.report("load otf","loading failed (table conversion error)")
+ report_otf("loading failed (table conversion error)")
end
else
- logs.report("load otf","loading failed (file read error)")
+ report_otf("loading failed (file read error)")
end
end
if data then
if trace_defining then
- logs.report("define font","loading from cache: %s",hash)
+ report_otf("loading from cache: %s",hash)
end
otf.enhance("unpack",data,filename,false) -- no message here
otf.add_dimensions(data)
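A minimal sketch of the cache validation that the hunks above add to otf.load: the cached table is only reused when both the file size and the modification time still match the font file on disk. cache_is_valid and the cached table layout are illustrative assumptions, not the actual ConTeXt API:

local lfs = require("lfs")

local function cache_is_valid(cached, filename)
    local attr = lfs.attributes(filename)
    if not attr then
        return false -- font file is gone, force a reload
    end
    local size = attr.size         or 0
    local time = attr.modification or 0
    return cached ~= nil and cached.size == size and cached.time == time
end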
@@ -6181,8 +5761,8 @@ function otf.show_feature_order(otfdata,filename)
local sequences = otfdata.luatex.sequences
if sequences and #sequences > 0 then
if trace_loading then
- logs.report("otf check","font %s has %s sequences",filename,#sequences)
- logs.report("otf check"," ")
+ report_otf("font %s has %s sequences",filename,#sequences)
+ report_otf(" ")
end
for nos=1,#sequences do
local sequence = sequences[nos]
@@ -6191,7 +5771,7 @@ function otf.show_feature_order(otfdata,filename)
local subtables = sequence.subtables or { "no-subtables" }
local features = sequence.features
if trace_loading then
- logs.report("otf check","%3i %-15s %-20s [%s]",nos,name,typ,concat(subtables,","))
+ report_otf("%3i %-15s %-20s [%s]",nos,name,typ,concat(subtables,","))
end
if features then
for feature, scripts in next, features do
@@ -6204,16 +5784,16 @@ function otf.show_feature_order(otfdata,filename)
tt[#tt+1] = format("[%s: %s]",script,concat(ttt," "))
end
if trace_loading then
- logs.report("otf check"," %s: %s",feature,concat(tt," "))
+ report_otf(" %s: %s",feature,concat(tt," "))
end
end
end
end
if trace_loading then
- logs.report("otf check","\n")
+ report_otf("\n")
end
elseif trace_loading then
- logs.report("otf check","font %s has no sequences",filename)
+ report_otf("font %s has no sequences",filename)
end
end
@@ -6428,7 +6008,7 @@ otf.enhancers["merge cid fonts"] = function(data,filename)
-- save us some more memory (at the cost of harder tracing)
if data.subfonts then
if data.glyphs and next(data.glyphs) then
- logs.report("load otf","replacing existing glyph table due to subfonts")
+ report_otf("replacing existing glyph table due to subfonts")
end
local cidinfo = data.cidinfo
local verbose = fonts.verbose
@@ -6462,17 +6042,17 @@ otf.enhancers["merge cid fonts"] = function(data,filename)
subfont.glyphs = nil
end
if trace_loading then
- logs.report("load otf","cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
+ report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
end
data.glyphs = glyphs
data.map = data.map or { }
data.map.map = uni_to_int
data.map.backmap = int_to_uni
elseif trace_loading then
- logs.report("load otf","unable to remap cid font, missing cid file for %s",filename)
+ report_otf("unable to remap cid font, missing cid file for %s",filename)
end
elseif trace_loading then
- logs.report("load otf","font %s has no glyphs",filename)
+ report_otf("font %s has no glyphs",filename)
end
end
end
@@ -6484,11 +6064,11 @@ otf.enhancers["prepare unicode"] = function(data,filename)
local glyphs = data.glyphs
local mapmap = data.map
if not mapmap then
- logs.report("load otf","no map in %s",filename)
+ report_otf("no map in %s",filename)
mapmap = { }
data.map = { map = mapmap }
elseif not mapmap.map then
- logs.report("load otf","no unicode map in %s",filename)
+ report_otf("no unicode map in %s",filename)
mapmap = { }
data.map.map = mapmap
else
@@ -6507,7 +6087,7 @@ otf.enhancers["prepare unicode"] = function(data,filename)
unicodes[name] = private
internals[index] = true
if trace_private then
- logs.report("load otf","enhance: glyph %s at index U+%04X is moved to private unicode slot U+%04X",name,index,private)
+ report_otf("enhance: glyph %s at index U+%04X is moved to private unicode slot U+%04X",name,index,private)
end
private = private + 1
else
@@ -6550,9 +6130,9 @@ otf.enhancers["prepare unicode"] = function(data,filename)
end
if trace_loading then
if #multiples > 0 then
- logs.report("load otf","%s glyph are reused: %s",#multiples, concat(multiples," "))
+ report_otf("%s glyph are reused: %s",#multiples, concat(multiples," "))
else
- logs.report("load otf","no glyph are reused")
+ report_otf("no glyph are reused")
end
end
luatex.indices = indices
@@ -6638,14 +6218,12 @@ otf.enhancers["check math"] = function(data,filename)
if hv then
math.horiz_variants = hv.variants
local p = hv.parts
- if p then
- if #p>0 then
- for i=1,#p do
- local pi = p[i]
- pi.glyph = unicodes[pi.component] or 0
- end
- math.horiz_parts = p
+ if p and #p > 0 then
+ for i=1,#p do
+ local pi = p[i]
+ pi.glyph = unicodes[pi.component] or 0
end
+ math.horiz_parts = p
end
local ic = hv.italic_correction
if ic and ic ~= 0 then
@@ -6657,14 +6235,12 @@ otf.enhancers["check math"] = function(data,filename)
local uc = unicodes[index]
math.vert_variants = vv.variants
local p = vv.parts
- if p then
- if #p>0 then
- for i=1,#p do
- local pi = p[i]
- pi.glyph = unicodes[pi.component] or 0
- end
- math.vert_parts = p
+ if p and #p > 0 then
+ for i=1,#p do
+ local pi = p[i]
+ pi.glyph = unicodes[pi.component] or 0
end
+ math.vert_parts = p
end
local ic = vv.italic_correction
if ic and ic ~= 0 then
@@ -6700,7 +6276,7 @@ otf.enhancers["share widths"] = function(data,filename)
end
if most > 1000 then
if trace_loading then
- logs.report("load otf", "most common width: %s (%s times), sharing (cjk font)",wd,most)
+ report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
end
for k, v in next, glyphs do
if v.width == wd then
@@ -6713,10 +6289,15 @@ end
-- kern: ttf has a table with kerns
+-- Weird, as maxfirsts and maxseconds can have holes: firsts seems to be indexed, but
+-- seconds can start at 2 .. this needs to be fixed, as getn as well as # are sort of
+-- unpredictable; alternatively we could force a [1] if not set (maybe I will do that
+-- anyway).
+
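A small sketch illustrating the remark above: with holes in the class tables, the length operator (and table.getn in Lua 5.1) is not reliable, while forcing a [1] entry makes the sequence contiguous again. The sample entries are made up:

local seconds = { }
seconds[2] = "afii57414 afii57452"
seconds[3] = "alef"
print(#seconds)   -- may print 0 or 3: the border of a table with a hole is undefined
seconds[1] = ""   -- forcing a [1] entry, as suggested above,
print(#seconds)   -- gives a contiguous sequence: prints 3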
--~ otf.enhancers["reorganize kerns"] = function(data,filename)
--~ local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes
--~ local mkdone = false
---~ for index, glyph in next, data.glyphs do
+--~ for index, glyph in next, glyphs do
--~ if glyph.kerns then
--~ local mykerns = { }
--~ for k,v in next, glyph.kerns do
@@ -6725,7 +6306,7 @@ end
--~ local uvc = unicodes[vc]
--~ if not uvc then
--~ if trace_loading then
---~ logs.report("load otf","problems with unicode %s of kern %s at glyph %s",vc,k,index)
+--~ report_otf("problems with unicode %s of kern %s at glyph %s",vc,k,index)
--~ end
--~ else
--~ if type(vl) ~= "table" then
@@ -6755,16 +6336,19 @@ end
--~ end
--~ end
--~ if trace_loading and mkdone then
---~ logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables")
+--~ report_otf("replacing 'kerns' tables by 'mykerns' tables")
--~ end
--~ if data.kerns then
--~ if trace_loading then
---~ logs.report("load otf", "removing global 'kern' table")
+--~ report_otf("removing global 'kern' table")
--~ end
--~ data.kerns = nil
--~ end
--~ local dgpos = data.gpos
--~ if dgpos then
+--~ local separator = lpeg.P(" ")
+--~ local other = ((1 - separator)^0) / unicodes
+--~ local splitter = lpeg.Ct(other * (separator * other)^0)
--~ for gp=1,#dgpos do
--~ local gpos = dgpos[gp]
--~ local subtables = gpos.subtables
@@ -6773,56 +6357,70 @@ end
--~ local subtable = subtables[s]
--~ local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
--~ if kernclass then -- the next one is quite slow
+--~ local split = { } -- saves time
--~ for k=1,#kernclass do
--~ local kcl = kernclass[k]
--~ local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular
--~ if type(lookups) ~= "table" then
--~ lookups = { lookups }
--~ end
+--~ local maxfirsts, maxseconds = getn(firsts), getn(seconds)
+--~ for _, s in next, firsts do
+--~ split[s] = split[s] or lpegmatch(splitter,s)
+--~ end
+--~ for _, s in next, seconds do
+--~ split[s] = split[s] or lpegmatch(splitter,s)
+--~ end
--~ for l=1,#lookups do
--~ local lookup = lookups[l]
---~ -- weird, as maxfirst and maxseconds can have holes
---~ local maxfirsts, maxseconds = getn(firsts), getn(seconds)
---~ if trace_loading then
---~ logs.report("load otf", "adding kernclass %s with %s times %s pairs",lookup, maxfirsts, maxseconds)
---~ end
---~ for fk, fv in next, firsts do
---~ for first in gmatch(fv,"[^ ]+") do
---~ local first_unicode = unicodes[first]
---~ if type(first_unicode) == "number" then
---~ first_unicode = { first_unicode }
+--~ local function do_it(fk,first_unicode)
+--~ local glyph = glyphs[mapmap[first_unicode]]
+--~ if glyph then
+--~ local mykerns = glyph.mykerns
+--~ if not mykerns then
+--~ mykerns = { } -- unicode indexed !
+--~ glyph.mykerns = mykerns
--~ end
---~ for f=1,#first_unicode do
---~ local glyph = glyphs[mapmap[first_unicode[f]]]
---~ if glyph then
---~ local mykerns = glyph.mykerns
---~ if not mykerns then
---~ mykerns = { } -- unicode indexed !
---~ glyph.mykerns = mykerns
---~ end
---~ local lookupkerns = mykerns[lookup]
---~ if not lookupkerns then
---~ lookupkerns = { }
---~ mykerns[lookup] = lookupkerns
---~ end
---~ for sk, sv in next, seconds do
---~ local offset = offsets[(fk-1) * maxseconds + sk]
---~ --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
---~ for second in gmatch(sv,"[^ ]+") do
---~ local second_unicode = unicodes[second]
---~ if type(second_unicode) == "number" then
+--~ local lookupkerns = mykerns[lookup]
+--~ if not lookupkerns then
+--~ lookupkerns = { }
+--~ mykerns[lookup] = lookupkerns
+--~ end
+--~ local baseoffset = (fk-1) * maxseconds
+--~ for sk=2,maxseconds do -- we can avoid this loop with a table
+--~ local sv = seconds[sk]
+--~ local splt = split[sv]
+--~ if splt then
+--~ local offset = offsets[baseoffset + sk]
+--~ --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
+--~ if offset then
+--~ for i=1,#splt do
+--~ local second_unicode = splt[i]
+--~ if tonumber(second_unicode) then
--~ lookupkerns[second_unicode] = offset
---~ else
---~ for s=1,#second_unicode do
---~ lookupkerns[second_unicode[s]] = offset
---~ end
---~ end
+--~ else for s=1,#second_unicode do
+--~ lookupkerns[second_unicode[s]] = offset
+--~ end end
--~ end
--~ end
---~ elseif trace_loading then
---~ logs.report("load otf", "no glyph data for U+%04X", first_unicode[f])
--~ end
--~ end
+--~ elseif trace_loading then
+--~ report_otf("no glyph data for U+%04X", first_unicode)
+--~ end
+--~ end
+--~ for fk=1,#firsts do
+--~ local fv = firsts[fk]
+--~ local splt = split[fv]
+--~ if splt then
+--~ for i=1,#splt do
+--~ local first_unicode = splt[i]
+--~ if tonumber(first_unicode) then
+--~ do_it(fk,first_unicode)
+--~ else for f=1,#first_unicode do
+--~ do_it(fk,first_unicode[f])
+--~ end end
+--~ end
--~ end
--~ end
--~ end
@@ -6839,7 +6437,27 @@ end
otf.enhancers["reorganize kerns"] = function(data,filename)
local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes
local mkdone = false
- for index, glyph in next, data.glyphs do
+ local function do_it(lookup,first_unicode,kerns)
+ local glyph = glyphs[mapmap[first_unicode]]
+ if glyph then
+ local mykerns = glyph.mykerns
+ if not mykerns then
+ mykerns = { } -- unicode indexed !
+ glyph.mykerns = mykerns
+ end
+ local lookupkerns = mykerns[lookup]
+ if not lookupkerns then
+ lookupkerns = { }
+ mykerns[lookup] = lookupkerns
+ end
+ for second_unicode, kern in next, kerns do
+ lookupkerns[second_unicode] = kern
+ end
+ elseif trace_loading then
+ report_otf("no glyph data for U+%04X", first_unicode)
+ end
+ end
+ for index, glyph in next, glyphs do
if glyph.kerns then
local mykerns = { }
for k,v in next, glyph.kerns do
@@ -6848,7 +6466,7 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
local uvc = unicodes[vc]
if not uvc then
if trace_loading then
- logs.report("load otf","problems with unicode %s of kern %s at glyph %s",vc,k,index)
+ report_otf("problems with unicode %s of kern %s at glyph %s",vc,k,index)
end
else
if type(vl) ~= "table" then
@@ -6878,11 +6496,11 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
end
end
if trace_loading and mkdone then
- logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables")
+ report_otf("replacing 'kerns' tables by 'mykerns' tables")
end
if data.kerns then
if trace_loading then
- logs.report("load otf", "removing global 'kern' table")
+ report_otf("removing global 'kern' table")
end
data.kerns = nil
end
@@ -6899,75 +6517,53 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
local subtable = subtables[s]
local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
if kernclass then -- the next one is quite slow
+ local split = { } -- saves time
for k=1,#kernclass do
local kcl = kernclass[k]
local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular
if type(lookups) ~= "table" then
lookups = { lookups }
end
- local split = { }
+ local maxfirsts, maxseconds = getn(firsts), getn(seconds)
+ -- here we could convert split into a list of unicodes, which is a bit
+ -- faster, but as this is only done when caching it does not save us much
+ for _, s in next, firsts do
+ split[s] = split[s] or lpegmatch(splitter,s)
+ end
+ for _, s in next, seconds do
+ split[s] = split[s] or lpegmatch(splitter,s)
+ end
for l=1,#lookups do
local lookup = lookups[l]
- -- weird, as maxfirst and maxseconds can have holes, first seems to be indexed, seconds starts at 2
- local maxfirsts, maxseconds = getn(firsts), getn(seconds)
- for _, s in next, firsts do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- for _, s in next, seconds do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- if trace_loading then
- logs.report("load otf", "adding kernclass %s with %s times %s pairs",lookup, maxfirsts, maxseconds)
- end
- local function do_it(fk,first_unicode)
- local glyph = glyphs[mapmap[first_unicode]]
- if glyph then
- local mykerns = glyph.mykerns
- if not mykerns then
- mykerns = { } -- unicode indexed !
- glyph.mykerns = mykerns
- end
- local lookupkerns = mykerns[lookup]
- if not lookupkerns then
- lookupkerns = { }
- mykerns[lookup] = lookupkerns
- end
- local baseoffset = (fk-1) * maxseconds
+ for fk=1,#firsts do
+ local fv = firsts[fk]
+ local splt = split[fv]
+ if splt then
+ local kerns, baseoffset = { }, (fk-1) * maxseconds
for sk=2,maxseconds do
local sv = seconds[sk]
- local offset = offsets[baseoffset + sk]
- --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
local splt = split[sv]
if splt then
- for i=1,#splt do
- local second_unicode = splt[i]
- if tonumber(second_unicode) then
- lookupkerns[second_unicode] = offset
- else
- for s=1,#second_unicode do
- lookupkerns[second_unicode[s]] = offset
- end
+ local offset = offsets[baseoffset + sk]
+ if offset then
+ for i=1,#splt do
+ local second_unicode = splt[i]
+ if tonumber(second_unicode) then
+ kerns[second_unicode] = offset
+ else for s=1,#second_unicode do
+ kerns[second_unicode[s]] = offset
+ end end
end
end
end
end
- elseif trace_loading then
- logs.report("load otf", "no glyph data for U+%04X", first_unicode)
- end
- end
- for fk=1,#firsts do
- local fv = firsts[fk]
- local splt = split[fv]
- if splt then
for i=1,#splt do
local first_unicode = splt[i]
if tonumber(first_unicode) then
- do_it(fk,first_unicode)
- else
- for f=1,#first_unicode do
- do_it(fk,first_unicode[f])
- end
- end
+ do_it(lookup,first_unicode,kerns)
+ else for f=1,#first_unicode do
+ do_it(lookup,first_unicode[f],kerns)
+ end end
end
end
end
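A sketch of the lpeg splitter used for the kern classes in the hunk above: glyph names separated by spaces are split and mapped through the unicodes table in a single pass. The unicodes content here is stand-in data:

local lpeg      = require("lpeg")
local lpegmatch = lpeg.match

local unicodes  = { one = 0x0031, two = 0x0032, three = 0x0033 } -- stand-in data
local separator = lpeg.P(" ")
local other     = ((1 - separator)^0) / unicodes                 -- map each name through the table
local splitter  = lpeg.Ct(other * (separator * other)^0)         -- collect the results

local t = lpegmatch(splitter, "one two three")
-- t is { 0x0031, 0x0032, 0x0033 }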
@@ -6982,6 +6578,14 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
end
end
+
+
+
+
+
+
+
+
otf.enhancers["strip not needed data"] = function(data,filename)
local verbose = fonts.verbose
local int_to_uni = data.luatex.unicodes
@@ -7052,7 +6656,7 @@ otf.enhancers["check math parameters"] = function(data,filename)
local pmp = private_math_parameters[m]
if not mathdata[pmp] then
if trace_loading then
- logs.report("load otf", "setting math parameter '%s' to 0", pmp)
+ report_otf("setting math parameter '%s' to 0", pmp)
end
mathdata[pmp] = 0
end
@@ -7097,11 +6701,11 @@ otf.enhancers["flatten glyph lookups"] = function(data,filename)
end
else
if trace_loading then
- logs.report("load otf", "flattening needed, report to context list")
+ report_otf("flattening needed, report to context list")
end
for a, b in next, s do
if trace_loading and vvv[a] then
- logs.report("load otf", "flattening conflict, report to context list")
+ report_otf("flattening conflict, report to context list")
end
vvv[a] = b
end
@@ -7163,7 +6767,7 @@ otf.enhancers["flatten feature tables"] = function(data,filename)
for _, tag in next, otf.glists do
if data[tag] then
if trace_loading then
- logs.report("load otf", "flattening %s table", tag)
+ report_otf("flattening %s table", tag)
end
for k, v in next, data[tag] do
local features = v.features
@@ -7232,7 +6836,7 @@ function otf.set_features(tfmdata,features)
if value and fiotf[f] then -- brr
if not done[f] then -- so, we can move some to triggers
if trace_features then
- logs.report("define otf","initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown', tfmdata.fullname or 'unknown')
+ report_otf("initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown', tfmdata.fullname or 'unknown')
end
fiotf[f](tfmdata,value) -- can set mode (no need to pass otf)
mode = tfmdata.mode or fonts.mode -- keep this, mode can be set local !
@@ -7259,7 +6863,7 @@ function otf.set_features(tfmdata,features)
local f = list[i]
if fmotf[f] then -- brr
if trace_features then
- logs.report("define otf","installing feature handler %s for mode %s for font %s",f,mode or 'unknown', tfmdata.fullname or 'unknown')
+ report_otf("installing feature handler %s for mode %s for font %s",f,mode or 'unknown', tfmdata.fullname or 'unknown')
end
processes[#processes+1] = fmotf[f]
end
@@ -7281,7 +6885,7 @@ function otf.otf_to_tfm(specification)
local format = specification.format
local features = specification.features.normal
local cache_id = specification.hash
- local tfmdata = containers.read(tfm.cache(),cache_id)
+ local tfmdata = containers.read(tfm.cache,cache_id)
--~ print(cache_id)
if not tfmdata then
local otfdata = otf.load(filename,format,sub,features and features.featurefile)
@@ -7315,7 +6919,7 @@ function otf.otf_to_tfm(specification)
shared.processes, shared.features = otf.set_features(tfmdata,fonts.define.check(features,otf.features.default))
end
end
- containers.write(tfm.cache(),cache_id,tfmdata)
+ containers.write(tfm.cache,cache_id,tfmdata)
end
return tfmdata
end
@@ -7361,7 +6965,7 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
if designsize == 0 then
designsize = 100
end
- local spaceunits = 500
+ local spaceunits, spacer = 500, "space"
-- indices maps from unicodes to indices
for u, i in next, indices do
characters[u] = { } -- we need this because for instance we add protruding info and loop over characters
@@ -7380,9 +6984,8 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
-- we have them shared because that packs nicer
-- we could prepare the variants and keep 'm in descriptions
if m then
- local variants = m.horiz_variants
+ local variants, parts, c = m.horiz_variants, m.horiz_parts, char
if variants then
- local c = char
for n in gmatch(variants,"[^ ]+") do
local un = unicodes[n]
if un and u ~= un then
@@ -7390,21 +6993,26 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
c = characters[un]
end
end
- c.horiz_variants = m.horiz_parts
- else
- local variants = m.vert_variants
- if variants then
- local c = char
- for n in gmatch(variants,"[^ ]+") do
- local un = unicodes[n]
- if un and u ~= un then
- c.next = un
- c = characters[un]
- end
+ c.horiz_variants = parts
+ elseif parts then
+ c.horiz_variants = parts
+ end
+ local variants, parts, c = m.vert_variants, m.vert_parts, char
+ if variants then
+ for n in gmatch(variants,"[^ ]+") do
+ local un = unicodes[n]
+ if un and u ~= un then
+ c.next = un
+ c = characters[un]
end
- c.vert_variants = m.vert_parts
- c.vert_italic_correction = m.vert_italic_correction
- end
+ end -- c is now last in chain
+ c.vert_variants = parts
+ elseif parts then
+ c.vert_variants = parts
+ end
+ local italic_correction = m.vert_italic_correction
+ if italic_correction then
+ c.vert_italic_correction = italic_correction
end
local kerns = m.kerns
if kerns then
@@ -7533,7 +7141,7 @@ function tfm.read_from_open_type(specification)
if p then
local ps = p * specification.textsize / 100
if trace_math then
- logs.report("define font","asked script size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
+ report_otf("asked script size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
end
s = ps
end
@@ -7542,7 +7150,7 @@ function tfm.read_from_open_type(specification)
if p then
local ps = p * specification.textsize / 100
if trace_math then
- logs.report("define font","asked scriptscript size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
+ report_otf("asked scriptscript size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
end
s = ps
end
@@ -7557,7 +7165,7 @@ function tfm.read_from_open_type(specification)
if specname then
tfmtable.name = specname
if trace_defining then
- logs.report("define font","overloaded fontname: '%s'",specname)
+ report_otf("overloaded fontname: '%s'",specname)
end
end
end
@@ -7612,7 +7220,9 @@ if not modules then modules = { } end modules ['font-otd'] = {
license = "see context related readme files"
}
-local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
+local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
+
+local report_otf = logs.new("load otf")
fonts = fonts or { }
fonts.otf = fonts.otf or { }
@@ -7630,7 +7240,7 @@ local a_to_script = { } otf.a_to_script = a_to_script
local a_to_language = { } otf.a_to_language = a_to_language
function otf.set_dynamics(font,dynamics,attribute)
- features = context_setups[context_numbers[attribute]] -- can be moved to caller
+ local features = context_setups[context_numbers[attribute]] -- can be moved to caller
if features then
local script = features.script or 'dflt'
local language = features.language or 'dflt'
@@ -7647,7 +7257,7 @@ function otf.set_dynamics(font,dynamics,attribute)
local dsla = dsl[attribute]
if dsla then
-- if trace_dynamics then
- -- logs.report("otf define","using dynamics %s: attribute %s, script %s, language %s",context_numbers[attribute],attribute,script,language)
+ -- report_otf("using dynamics %s: attribute %s, script %s, language %s",context_numbers[attribute],attribute,script,language)
-- end
return dsla
else
@@ -7666,9 +7276,10 @@ function otf.set_dynamics(font,dynamics,attribute)
tfmdata.script = script
tfmdata.shared.features = { }
-- end of save
- dsla = otf.set_features(tfmdata,fonts.define.check(features,otf.features.default))
+ local set = fonts.define.check(features,otf.features.default)
+ dsla = otf.set_features(tfmdata,set)
if trace_dynamics then
- logs.report("otf define","setting dynamics %s: attribute %s, script %s, language %s",context_numbers[attribute],attribute,script,language)
+ report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",context_numbers[attribute],attribute,script,language,table.sequenced(set))
end
-- we need to restore some values
tfmdata.script = saved.script
@@ -7773,6 +7384,8 @@ local trace_ligatures = false trackers.register("otf.ligatures", function
local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
+local report_prepare = logs.new("otf prepare")
+
local wildcard = "*"
local default = "dflt"
@@ -7792,8 +7405,20 @@ local function gref(descriptions,n)
local num, nam = { }, { }
for i=1,#n do
local ni = n[i]
- num[i] = format("U+%04X",ni)
- nam[i] = descriptions[ni].name or "?"
+ -- ! ! ! could be a helper ! ! !
+ if type(ni) == "table" then
+ local nnum, nnam = { }, { }
+ for j=1,#ni do
+ local nj = ni[j]
+ nnum[j] = format("U+%04X",nj)
+ nnam[j] = descriptions[nj].name or "?"
+ end
+ num[i] = concat(nnum,"|")
+ nam[i] = concat(nnam,"|")
+ else
+ num[i] = format("U+%04X",ni)
+ nam[i] = descriptions[ni].name or "?"
+ end
end
return format("%s (%s)",concat(num," "), concat(nam," "))
else
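The hunk above teaches gref to handle both single unicodes and tables of unicodes; the "could be a helper" remark could look roughly like the following sketch (grefone is a hypothetical name, not part of the patch):

local format, concat = string.format, table.concat

local function grefone(ni)
    -- format one unicode, or a table of unicodes, as "U+XXXX|U+YYYY"
    if type(ni) == "table" then
        local t = { }
        for j=1,#ni do
            t[j] = format("U+%04X", ni[j])
        end
        return concat(t, "|")
    else
        return format("U+%04X", ni)
    end
end

print(grefone(0x0066))              -- U+0066
print(grefone({ 0x0066, 0x0069 }))  -- U+0066|U+0069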
@@ -7827,7 +7452,7 @@ local function resolve_ligatures(tfmdata,ligatures,kind)
local c, f, s = characters[uc], ligs[1], ligs[2]
local uft, ust = unicodes[f] or 0, unicodes[s] or 0
if not uft or not ust then
- logs.report("define otf","%s: unicode problem with base ligature %s = %s + %s",cref(kind),gref(descriptions,uc),gref(descriptions,uft),gref(descriptions,ust))
+ report_prepare("%s: unicode problem with base ligature %s = %s + %s",cref(kind),gref(descriptions,uc),gref(descriptions,uft),gref(descriptions,ust))
-- some kind of error
else
if type(uft) == "number" then uft = { uft } end
@@ -7838,7 +7463,7 @@ local function resolve_ligatures(tfmdata,ligatures,kind)
local us = ust[usi]
if changed[uf] or changed[us] then
if trace_baseinit and trace_ligatures then
- logs.report("define otf","%s: base ligature %s + %s ignored",cref(kind),gref(descriptions,uf),gref(descriptions,us))
+ report_prepare("%s: base ligature %s + %s ignored",cref(kind),gref(descriptions,uf),gref(descriptions,us))
end
else
local first, second = characters[uf], us
@@ -7854,7 +7479,7 @@ local function resolve_ligatures(tfmdata,ligatures,kind)
t[second] = { type = 0, char = uc[1] } -- can this still happen?
end
if trace_baseinit and trace_ligatures then
- logs.report("define otf","%s: base ligature %s + %s => %s",cref(kind),gref(descriptions,uf),gref(descriptions,us),gref(descriptions,uc))
+ report_prepare("%s: base ligature %s + %s => %s",cref(kind),gref(descriptions,uf),gref(descriptions,us),gref(descriptions,uc))
end
end
end
@@ -7887,7 +7512,7 @@ end
local splitter = lpeg.splitat(" ")
-function prepare_base_substitutions(tfmdata,kind,value) -- we can share some code with the node features
+local function prepare_base_substitutions(tfmdata,kind,value) -- we can share some code with the node features
if value then
local otfdata = tfmdata.shared.otfdata
local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
@@ -7910,7 +7535,7 @@ function prepare_base_substitutions(tfmdata,kind,value) -- we can share some cod
end
if characters[upv] then
if trace_baseinit and trace_singles then
- logs.report("define otf","%s: base substitution %s => %s",cref(kind,lookup),gref(descriptions,k),gref(descriptions,upv))
+ report_prepare("%s: base substitution %s => %s",cref(kind,lookup),gref(descriptions,k),gref(descriptions,upv))
end
changed[k] = upv
end
@@ -7938,7 +7563,7 @@ function prepare_base_substitutions(tfmdata,kind,value) -- we can share some cod
end
if characters[upc] then
if trace_baseinit and trace_alternatives then
- logs.report("define otf","%s: base alternate %s %s => %s",cref(kind,lookup),tostring(value),gref(descriptions,k),gref(descriptions,upc))
+ report_prepare("%s: base alternate %s %s => %s",cref(kind,lookup),tostring(value),gref(descriptions,k),gref(descriptions,upc))
end
changed[k] = upc
end
@@ -7953,7 +7578,7 @@ function prepare_base_substitutions(tfmdata,kind,value) -- we can share some cod
local upc = { lpegmatch(splitter,pc) }
for i=1,#upc do upc[i] = unicodes[upc[i]] end
-- we assume that it's no table
- logs.report("define otf","%s: base ligature %s => %s",cref(kind,lookup),gref(descriptions,upc),gref(descriptions,k))
+ report_prepare("%s: base ligature %s => %s",cref(kind,lookup),gref(descriptions,upc),gref(descriptions,k))
end
ligatures[#ligatures+1] = { pc, k }
end
@@ -8029,7 +7654,7 @@ local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns
if v ~= 0 and not t[k] then -- maybe no 0 test here
t[k], done = v, true
if trace_baseinit and trace_kerns then
- logs.report("define otf","%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v)
+ report_prepare("%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v)
end
end
end
@@ -8115,10 +7740,10 @@ function fonts.initializers.base.otf.features(tfmdata,value)
-- verbose name as long as we don't use <()<>[]{}/%> and the length
-- is < 128.
tfmdata.fullname = tfmdata.fullname .. "-" .. base -- tfmdata.psname is the original
- --~ logs.report("otf define","fullname base hash: '%s', featureset '%s'",tfmdata.fullname,hash)
+ --~ report_prepare("fullname base hash: '%s', featureset '%s'",tfmdata.fullname,hash)
end
if trace_preparing then
- logs.report("otf define","preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
+ report_prepare("preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
end
end
end
@@ -8274,6 +7899,12 @@ local trace_steps = false trackers.register("otf.steps", function
local trace_skips = false trackers.register("otf.skips", function(v) trace_skips = v end)
local trace_directions = false trackers.register("otf.directions", function(v) trace_directions = v end)
+local report_direct = logs.new("otf direct")
+local report_subchain = logs.new("otf subchain")
+local report_chain = logs.new("otf chain")
+local report_process = logs.new("otf process")
+local report_prepare = logs.new("otf prepare")
+
trackers.register("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
trackers.register("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
@@ -8371,10 +8002,10 @@ local function logprocess(...)
if trace_steps then
registermessage(...)
end
- logs.report("otf direct",...)
+ report_direct(...)
end
local function logwarning(...)
- logs.report("otf direct",...)
+ report_direct(...)
end
local function gref(n)
@@ -8951,7 +8582,7 @@ local krn = kerns[nextchar]
end
end
else
- logs.report("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
local a, b = krn[3], krn[7]
if a and a ~= 0 then
local k = set_kern(snext,factor,rlmode,a)
@@ -8990,12 +8621,11 @@ local function logprocess(...)
if trace_steps then
registermessage(...)
end
- logs.report("otf subchain",...)
-end
-local function logwarning(...)
- logs.report("otf subchain",...)
+ report_subchain(...)
end
+local logwarning = report_subchain
+
-- ['coverage']={
-- ['after']={ "r" },
-- ['before']={ "q" },
@@ -9033,12 +8663,11 @@ local function logprocess(...)
if trace_steps then
registermessage(...)
end
- logs.report("otf chain",...)
-end
-local function logwarning(...)
- logs.report("otf chain",...)
+ report_chain(...)
end
+local logwarning = report_chain
+
-- We could share functions but that would lead to extra function calls with many
-- arguments, redundant tests and confusing messages.
@@ -9187,7 +8816,7 @@ end
<p>Here we replace start by new glyph. First we delete the rest of the match.</p>
--ldx]]--
-function chainprocs.gsub_alternate(start,stop,kind,lookupname,currentcontext,cache,currentlookup)
+function chainprocs.gsub_alternate(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname)
-- todo: marks ?
delete_till_stop(start,stop)
local current = start
@@ -9284,7 +8913,7 @@ function chainprocs.gsub_ligature(start,stop,kind,chainname,currentcontext,cache
logprocess("%s: replacing character %s upto %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
end
end
- start = toligature(kind,lookup,start,stop,l2,currentlookup.flags[1],discfound)
+ start = toligature(kind,lookupname,start,stop,l2,currentlookup.flags[1],discfound)
return start, true, nofreplacements
elseif trace_bugs then
if start == stop then
@@ -9619,7 +9248,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur
end
end
else
- logs.report("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
local a, b = krn[3], krn[7]
if a and a ~= 0 then
local k = set_kern(snext,factor,rlmode,a)
@@ -9993,12 +9622,11 @@ local function logprocess(...)
if trace_steps then
registermessage(...)
end
- logs.report("otf process",...)
-end
-local function logwarning(...)
- logs.report("otf process",...)
+ report_process(...)
end
+local logwarning = report_process
+
local function report_missing_cache(typ,lookup)
local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
local t = f[typ] if not t then t = { } f[typ] = t end
@@ -10011,6 +9639,9 @@ end
local resolved = { } -- we only resolve a font,script,language pair once
-- todo: pass all these 'locals' in a table
+--
+-- dynamics will be isolated some day ... for the moment we catch attribute zero
+-- not being set
function fonts.methods.node.otf.features(head,font,attr)
if trace_steps then
@@ -10055,8 +9686,7 @@ function fonts.methods.node.otf.features(head,font,attr)
local ra = rl [attr] if ra == nil then ra = { } rl [attr] = ra end -- attr can be false
-- sequences always > 1 so no need for optimization
for s=1,#sequences do
- local pardir, txtdir = 0, { }
- local success = false
+ local pardir, txtdir, success = 0, { }, false
local sequence = sequences[s]
local r = ra[s] -- cache
if r == nil then
@@ -10094,7 +9724,7 @@ function fonts.methods.node.otf.features(head,font,attr)
end
if trace_applied then
local typ, action = match(sequence.type,"(.*)_(.*)")
- logs.report("otf node mode",
+ report_process(
"%s font: %03i, dynamic: %03i, kind: %s, lookup: %3i, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
(valid and "+") or "-",font,attr or 0,kind,s,script,language,what,typ,action,sequence.name)
end
@@ -10123,24 +9753,33 @@ function fonts.methods.node.otf.features(head,font,attr)
while start do
local id = start.id
if id == glyph then
- if start.subtype<256 and start.font == font and (not attr or has_attribute(start,0,attr)) then
---~ if start.subtype<256 and start.font == font and has_attribute(start,0,attr) then
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = thecache[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- start, success = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,i)
- if success then
- break
+ if start.subtype<256 and start.font == font then
+ local a = has_attribute(start,0)
+ if a then
+ a = a == attr
+ else
+ a = true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname = subtables[i]
+ local lookupcache = thecache[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ start, success = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,i)
+ if success then
+ break
+ end
end
+ else
+ report_missing_cache(typ,lookupname)
end
- else
- report_missing_cache(typ,lookupname)
end
+ if start then start = start.prev end
+ else
+ start = start.prev
end
- if start then start = start.prev end
else
start = start.prev
end
@@ -10163,18 +9802,27 @@ function fonts.methods.node.otf.features(head,font,attr)
while start do
local id = start.id
if id == glyph then
---~ if start.font == font and start.subtype<256 and has_attribute(start,0,attr) and (not attribute or has_attribute(start,state,attribute)) then
- if start.font == font and start.subtype<256 and (not attr or has_attribute(start,0,attr)) and (not attribute or has_attribute(start,state,attribute)) then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- sequence kan weg
- local ok
- start, ok = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,1)
- if ok then
- success = true
+ if start.subtype<256 and start.font == font then
+ local a = has_attribute(start,0)
+ if a then
+ a = (a == attr) and (not attribute or has_attribute(start,state,attribute))
+ else
+ a = not attribute or has_attribute(start,state,attribute)
+ end
+ if a then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- sequence can be removed
+ local ok
+ start, ok = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,1)
+ if ok then
+ success = true
+ end
end
+ if start then start = start.next end
+ else
+ start = start.next
end
- if start then start = start.next end
else
start = start.next
end
@@ -10211,7 +9859,7 @@ function fonts.methods.node.otf.features(head,font,attr)
rlmode = pardir
end
if trace_directions then
- logs.report("fonts","directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
+ report_process("directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
elseif subtype == 6 then
local dir = start.dir
@@ -10225,7 +9873,7 @@ function fonts.methods.node.otf.features(head,font,attr)
rlmode = pardir
--~ txtdir = { }
if trace_directions then
- logs.report("fonts","directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
+ report_process("directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
end
start = start.next
@@ -10238,27 +9886,36 @@ function fonts.methods.node.otf.features(head,font,attr)
while start do
local id = start.id
if id == glyph then
- if start.subtype<256 and start.font == font and (not attr or has_attribute(start,0,attr)) and (not attribute or has_attribute(start,state,attribute)) then
---~ if start.subtype<256 and start.font == font and has_attribute(start,0,attr) and (not attribute or has_attribute(start,state,attribute)) then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = thecache[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- start, ok = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,i)
- if ok then
- success = true
- break
+ if start.subtype<256 and start.font == font then
+ local a = has_attribute(start,0)
+ if a then
+ a = (a == attr) and (not attribute or has_attribute(start,state,attribute))
+ else
+ a = not attribute or has_attribute(start,state,attribute)
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = thecache[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ start, ok = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,i)
+ if ok then
+ success = true
+ break
+ end
end
+ else
+ report_missing_cache(typ,lookupname)
end
- else
- report_missing_cache(typ,lookupname)
end
+ if start then start = start.next end
+ else
+ start = start.next
end
- if start then start = start.next end
else
start = start.next
end
@@ -10279,7 +9936,6 @@ function fonts.methods.node.otf.features(head,font,attr)
-- end
elseif id == whatsit then
local subtype = start.subtype
- local subtype = start.subtype
if subtype == 7 then
local dir = start.dir
if dir == "+TRT" or dir == "+TLT" then
@@ -10296,7 +9952,7 @@ function fonts.methods.node.otf.features(head,font,attr)
rlmode = pardir
end
if trace_directions then
- logs.report("fonts","directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
+ report_process("directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
elseif subtype == 6 then
local dir = start.dir
@@ -10310,7 +9966,7 @@ function fonts.methods.node.otf.features(head,font,attr)
rlmode = pardir
--~ txtdir = { }
if trace_directions then
- logs.report("fonts","directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
+ report_process("directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
end
start = start.next
@@ -10409,7 +10065,7 @@ local function prepare_lookups(tfmdata)
-- as well (no big deal)
--
local action = {
- substitution = function(p,lookup,k,glyph,unicode)
+ substitution = function(p,lookup,glyph,unicode)
local old, new = unicode, unicodes[p[2]]
if type(new) == "table" then
new = new[1]
@@ -10418,10 +10074,10 @@ local function prepare_lookups(tfmdata)
if not s then s = { } single[lookup] = s end
s[old] = new
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: substitution %s => %s",lookup,old,new)
+ --~ report_prepare("lookup %s: substitution %s => %s",lookup,old,new)
--~ end
end,
- multiple = function (p,lookup,k,glyph,unicode)
+ multiple = function (p,lookup,glyph,unicode)
local old, new = unicode, { }
local m = multiple[lookup]
if not m then m = { } multiple[lookup] = m end
@@ -10435,10 +10091,10 @@ local function prepare_lookups(tfmdata)
end
end
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: multiple %s => %s",lookup,old,concat(new," "))
+ --~ report_prepare("lookup %s: multiple %s => %s",lookup,old,concat(new," "))
--~ end
end,
- alternate = function(p,lookup,k,glyph,unicode)
+ alternate = function(p,lookup,glyph,unicode)
local old, new = unicode, { }
local a = alternate[lookup]
if not a then a = { } alternate[lookup] = a end
@@ -10452,12 +10108,12 @@ local function prepare_lookups(tfmdata)
end
end
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: alternate %s => %s",lookup,old,concat(new,"|"))
+ --~ report_prepare("lookup %s: alternate %s => %s",lookup,old,concat(new,"|"))
--~ end
end,
- ligature = function (p,lookup,k,glyph,unicode)
+ ligature = function (p,lookup,glyph,unicode)
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: ligature %s => %s",lookup,p[2],glyph.name)
+ --~ report_prepare("lookup %s: ligature %s => %s",lookup,p[2],glyph.name)
--~ end
local first = true
local t = ligature[lookup]
@@ -10466,7 +10122,7 @@ local function prepare_lookups(tfmdata)
if first then
local u = unicodes[s]
if not u then
- logs.report("define otf","lookup %s: ligature %s => %s ignored due to invalid unicode",lookup,p[2],glyph.name)
+ report_prepare("lookup %s: ligature %s => %s ignored due to invalid unicode",lookup,p[2],glyph.name)
break
elseif type(u) == "number" then
if not t[u] then
@@ -10503,13 +10159,13 @@ local function prepare_lookups(tfmdata)
end
t[2] = unicode
end,
- position = function(p,lookup,k,glyph,unicode)
+ position = function(p,lookup,glyph,unicode)
-- not used
local s = position[lookup]
if not s then s = { } position[lookup] = s end
s[unicode] = p[2] -- direct pointer to kern spec
end,
- pair = function(p,lookup,k,glyph,unicode)
+ pair = function(p,lookup,glyph,unicode)
local s = pair[lookup]
if not s then s = { } pair[lookup] = s end
local others = s[unicode]
@@ -10536,7 +10192,7 @@ local function prepare_lookups(tfmdata)
end
end
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: pair for U+%04X",lookup,unicode)
+ --~ report_prepare("lookup %s: pair for U+%04X",lookup,unicode)
--~ end
end,
}
@@ -10545,7 +10201,7 @@ local function prepare_lookups(tfmdata)
local lookups = glyph.slookups
if lookups then
for lookup, p in next, lookups do
- action[p[1]](p,lookup,k,glyph,unicode)
+ action[p[1]](p,lookup,glyph,unicode)
end
end
local lookups = glyph.mlookups
@@ -10553,7 +10209,7 @@ local function prepare_lookups(tfmdata)
for lookup, whatever in next, lookups do
for i=1,#whatever do -- normaly one
local p = whatever[i]
- action[p[1]](p,lookup,k,glyph,unicode)
+ action[p[1]](p,lookup,glyph,unicode)
end
end
end
@@ -10564,7 +10220,7 @@ local function prepare_lookups(tfmdata)
if not k then k = { } kerns[lookup] = k end
k[unicode] = krn -- ref to glyph, saves lookup
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: kern for U+%04X",lookup,unicode)
+ --~ report_prepare("lookup %s: kern for U+%04X",lookup,unicode)
--~ end
end
end
@@ -10580,7 +10236,7 @@ local function prepare_lookups(tfmdata)
if not f then f = { } mark[lookup] = f end
f[unicode] = anchors -- ref to glyph, saves lookup
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: mark anchor %s for U+%04X",lookup,name,unicode)
+ --~ report_prepare("lookup %s: mark anchor %s for U+%04X",lookup,name,unicode)
--~ end
end
end
@@ -10594,7 +10250,7 @@ local function prepare_lookups(tfmdata)
if not f then f = { } cursive[lookup] = f end
f[unicode] = anchors -- ref to glyph, saves lookup
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: exit anchor %s for U+%04X",lookup,name,unicode)
+ --~ report_prepare("lookup %s: exit anchor %s for U+%04X",lookup,name,unicode)
--~ end
end
end
@@ -10608,7 +10264,7 @@ end
-- local cache = { }
luatex = luatex or {} -- this has to change ... we need a better one
-function prepare_contextchains(tfmdata)
+local function prepare_contextchains(tfmdata)
local otfdata = tfmdata.shared.otfdata
local lookups = otfdata.lookups
if lookups then
@@ -10627,7 +10283,7 @@ function prepare_contextchains(tfmdata)
for lookupname, lookupdata in next, otfdata.lookups do
local lookuptype = lookupdata.type
if not lookuptype then
- logs.report("otf process","missing lookuptype for %s",lookupname)
+ report_prepare("missing lookuptype for %s",lookupname)
else
local rules = lookupdata.rules
if rules then
@@ -10635,7 +10291,7 @@ function prepare_contextchains(tfmdata)
-- contextchain[lookupname][unicode]
if fmt == "coverage" then
if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- logs.report("otf process","unsupported coverage %s for %s",lookuptype,lookupname)
+ report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
else
local contexts = contextchain[lookupname]
if not contexts then
@@ -10671,7 +10327,7 @@ function prepare_contextchains(tfmdata)
end
elseif fmt == "reversecoverage" then
if lookuptype ~= "reversesub" then
- logs.report("otf process","unsupported reverse coverage %s for %s",lookuptype,lookupname)
+ report_prepare("unsupported reverse coverage %s for %s",lookuptype,lookupname)
else
local contexts = reversecontextchain[lookupname]
if not contexts then
@@ -10711,7 +10367,7 @@ function prepare_contextchains(tfmdata)
end
elseif fmt == "glyphs" then
if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- logs.report("otf process","unsupported coverage %s for %s",lookuptype,lookupname)
+ report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
else
local contexts = contextchain[lookupname]
if not contexts then
@@ -10782,7 +10438,7 @@ function fonts.initializers.node.otf.features(tfmdata,value)
prepare_lookups(tfmdata)
otfdata.shared.initialized = true
if trace_preparing then
- logs.report("otf process","preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
+ report_prepare("preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
end
end
end
@@ -10845,6 +10501,7 @@ local a_to_language = otf.a_to_language
-- somewhat slower; and .. we need a chain of them
function fonts.initializers.node.otf.analyze(tfmdata,value,attr)
+ local script, language
if attr and attr > 0 then
script, language = a_to_script[attr], a_to_language[attr]
else
@@ -11098,6 +10755,8 @@ local type, next = type, next
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local report_otf = logs.new("load otf")
+
local otf = fonts.otf
local tfm = fonts.tfm
@@ -11271,7 +10930,7 @@ fonts.otf.enhancers["enrich with features"] = function(data,filename)
end
if done > 0 then
if trace_loading then
- logs.report("load otf","enhance: registering %s feature (%s glyphs affected)",kind,done)
+ report_otf("enhance: registering %s feature (%s glyphs affected)",kind,done)
end
end
end
@@ -11319,6 +10978,9 @@ local directive_embedall = false directives.register("fonts.embedall", function
trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading")
trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*")
+local report_define = logs.new("define fonts")
+local report_afm = logs.new("load afm")
+
--[[ldx--
<p>Here we deal with defining fonts. We do so by intercepting the
default loader that only handles <l n='tfm'/>.</p>
@@ -11423,7 +11085,7 @@ end
function define.makespecification(specification, lookup, name, sub, method, detail, size)
size = size or 655360
if trace_defining then
- logs.report("define font","%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
+ report_define("%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
specification, (lookup ~= "" and lookup) or "[file]", (name ~= "" and name) or "-",
(sub ~= "" and sub) or "-", (method ~= "" and method) or "-", (detail ~= "" and detail) or "-")
end
@@ -11536,18 +11198,29 @@ end
define.resolvers = resolvers
+-- todo: reporter
+
function define.resolvers.file(specification)
- specification.forced = file.extname(specification.name)
- specification.name = file.removesuffix(specification.name)
+ local suffix = file.suffix(specification.name)
+ if fonts.formats[suffix] then
+ specification.forced = suffix
+ specification.name = file.removesuffix(specification.name)
+ end
end
function define.resolvers.name(specification)
local resolve = fonts.names.resolve
if resolve then
- specification.resolved, specification.sub = fonts.names.resolve(specification.name,specification.sub)
- if specification.resolved then
- specification.forced = file.extname(specification.resolved)
- specification.name = file.removesuffix(specification.resolved)
+ local resolved, sub = fonts.names.resolve(specification.name,specification.sub)
+ specification.resolved, specification.sub = resolved, sub
+ if resolved then
+ local suffix = file.suffix(resolved)
+ if fonts.formats[suffix] then
+ specification.forced = suffix
+ specification.name = file.removesuffix(resolved)
+ else
+ specification.name = resolved
+ end
end
else
define.resolvers.file(specification)
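A sketch of the resolver change above: a suffix is only treated as a forced format (and stripped from the name) when it is a known font format, so names that merely contain a dot are left alone. formats and split_forced are illustrative stand-ins for fonts.formats and the resolver code:

local formats = { otf = true, ttf = true, ttc = true, afm = true, tfm = true } -- stand-in for fonts.formats

local function split_forced(name)
    local suffix = name:match("%.(%a+)$")
    if suffix and formats[suffix:lower()] then
        return name:sub(1, #name - #suffix - 1), suffix -- basename, forced format
    end
    return name, nil -- unknown or missing suffix: leave the name untouched
end

print(split_forced("texgyrepagella-regular.otf")) -- texgyrepagella-regular   otf
print(split_forced("Latin Modern Roman"))         -- Latin Modern Roman       nil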
@@ -11610,14 +11283,14 @@ function tfm.read(specification)
if forced ~= "" then
tfmtable = readers[lower(forced)](specification)
if not tfmtable then
- logs.report("define font","forced type %s of %s not found",forced,specification.name)
+ report_define("forced type %s of %s not found",forced,specification.name)
end
else
for s=1,#sequence do -- reader sequence
local reader = sequence[s]
if readers[reader] then -- not really needed
if trace_defining then
- logs.report("define font","trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
+ report_define("trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
end
tfmtable = readers[reader](specification)
if tfmtable then
@@ -11642,7 +11315,7 @@ function tfm.read(specification)
end
end
if not tfmtable then
- logs.report("define font","font with name %s is not found",specification.name)
+ report_define("font with name %s is not found",specification.name)
end
return tfmtable
end
@@ -11702,7 +11375,7 @@ local function check_afm(specification,fullname)
foundname = shortname
-- tfm.set_normal_feature(specification,'encoding',encoding) -- will go away
if trace_loading then
- logs.report("load afm","stripping encoding prefix from filename %s",afmname)
+ report_afm("stripping encoding prefix from filename %s",afmname)
end
end
end
@@ -11759,7 +11432,7 @@ end
local function check_otf(forced,specification,suffix,what)
local name = specification.name
if forced then
- name = file.addsuffix(name,suffix)
+ name = file.addsuffix(name,suffix,true)
end
local fullname, tfmtable = resolvers.findbinfile(name,suffix) or "", nil -- one shot
if fullname == "" then
@@ -11835,7 +11508,7 @@ function define.register(fontdata,id)
local hash = fontdata.hash
if not tfm.internalized[hash] then
if trace_defining then
- logs.report("define font","loading at 2 id %s, hash: %s",id or "?",hash or "?")
+ report_define("loading at 2 id %s, hash: %s",id or "?",hash or "?")
end
fonts.identifiers[id] = fontdata
fonts.characters [id] = fontdata.characters
@@ -11881,7 +11554,7 @@ function define.read(specification,size,id) -- id can be optional, name can alre
specification = define.resolve(specification)
local hash = tfm.hash_instance(specification)
if cache_them then
- local fontdata = containers.read(fonts.cache(),hash) -- for tracing purposes
+ local fontdata = containers.read(fonts.cache,hash) -- for tracing purposes
end
local fontdata = define.registered(hash) -- id
if not fontdata then
@@ -11894,7 +11567,7 @@ function define.read(specification,size,id) -- id can be optional, name can alre
end
end
if cache_them then
- fontdata = containers.write(fonts.cache(),hash,fontdata) -- for tracing purposes
+ fontdata = containers.write(fonts.cache,hash,fontdata) -- for tracing purposes
end
if fontdata then
fontdata.hash = hash
@@ -11906,9 +11579,9 @@ function define.read(specification,size,id) -- id can be optional, name can alre
end
define.last = fontdata or id -- todo ! ! ! ! !
if not fontdata then
- logs.report("define font", "unknown font %s, loading aborted",specification.name)
+ report_define( "unknown font %s, loading aborted",specification.name)
elseif trace_defining and type(fontdata) == "table" then
- logs.report("define font","using %s font with id %s, name:%s size:%s bytes:%s encoding:%s fullname:%s filename:%s",
+ report_define("using %s font with id %s, name:%s size:%s bytes:%s encoding:%s fullname:%s filename:%s",
fontdata.type or "unknown",
id or "?",
fontdata.name or "?",
@@ -11929,18 +11602,18 @@ function vf.find(name)
local format = fonts.logger.format(name)
if format == 'tfm' or format == 'ofm' then
if trace_defining then
- logs.report("define font","locating vf for %s",name)
+ report_define("locating vf for %s",name)
end
return resolvers.findbinfile(name,"ovf")
else
if trace_defining then
- logs.report("define font","vf for %s is already taken care of",name)
+ report_define("vf for %s is already taken care of",name)
end
return nil -- ""
end
else
if trace_defining then
- logs.report("define font","locating vf for %s",name)
+ report_define("locating vf for %s",name)
end
return resolvers.findbinfile(name,"ovf")
end
diff --git a/tex/generic/context/luatex-fonts.lua b/tex/generic/context/luatex-fonts.lua
index 84acb2b18..0d89a60e2 100644
--- a/tex/generic/context/luatex-fonts.lua
+++ b/tex/generic/context/luatex-fonts.lua
@@ -91,11 +91,8 @@ else
-- modules contain a little bit of code that is not used. It's
-- not worth weeding.
- loadmodule('node-ini.lua')
- loadmodule('node-res.lua') -- will be stripped
- loadmodule('node-inj.lua') -- will be replaced (luatex > .50)
- loadmodule('node-fnt.lua')
loadmodule('node-dum.lua')
+ loadmodule('node-inj.lua') -- will be replaced (luatex >= .70)
-- Now come the font modules that deal with traditional TeX fonts
-- as well as open type fonts. We don't load the afm related code
@@ -117,7 +114,7 @@ else
loadmodule('font-oti.lua')
loadmodule('font-otb.lua')
loadmodule('font-otn.lua')
- loadmodule('font-ota.lua') -- might be split
+ loadmodule('font-ota.lua')
loadmodule('font-otc.lua')
loadmodule('font-def.lua')
loadmodule('font-xtx.lua')
diff --git a/web2c/context.cnf b/web2c/context.cnf
index 1263aaf4f..be2fe4d5c 100644
--- a/web2c/context.cnf
+++ b/web2c/context.cnf
@@ -12,22 +12,23 @@ progname = unsetprogname
engine = unsetengine
backend = unsetbackend
+TEXMFOS = $SELFAUTODIR
+TEXMFSYSTEM = $SELFAUTOPARENT/texmf-$SELFAUTOSYSTEM
TEXMFMAIN = $SELFAUTOPARENT/texmf
TEXMFLOCAL = $SELFAUTOPARENT/texmf-local
TEXMFFONTS = $SELFAUTOPARENT/texmf-fonts
-TEXMFEXTRA = $SELFAUTOPARENT/texmf-extra
TEXMFPROJECT = $SELFAUTOPARENT/texmf-project
+TEXMFCONTEXT = $SELFAUTOPARENT/texmf-context
VARTEXMF = $SELFAUTOPARENT/texmf-var
-HOMETEXMF = /nonexist
-TEXMF = {!!$TEXMFPROJECT,!!$TEXMFFONTS,!!$TEXMFLOCAL,!!$TEXMFEXTRA,!!$TEXMFMAIN}
-SYSTEXMF = $TEXMF
+HOMETEXMF = $HOME/texmf
-TEXMFCACHE = $TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
+TEXMF = {$TEXMFHOME,!!$TEXMFPROJECT,!!$TEXMFFONTS,!!$TEXMFLOCAL,!!$TEXMFCONTEXT,!!$TEXMFSYSTEM,!!$TEXMFMAIN}
+SYSTEXMF = $TEXMF
TEXMFCNF = .;$TEXMF/texmf{-local,}/web2c
TEXMFDBS = $TEXMF;$VARTEXFONTS
-VARTEXFONTS = $TEMPFONTPATH/varfonts
+VARTEXFONTS = $VARTEXMF/varfonts
% In the case of a multi-os setup, this one can be set
% by the environment. Watch out: lowercase engine!
@@ -60,10 +61,10 @@ TEXFONTMAPS = .;$TEXMF/fonts/{data,map}/{$progname,$engine,pdftex,dvips,}//;$T
VFFONTS = $TEXMF/fonts/{data,vf}//
TFMFONTS = $TEXMF/fonts/{data,tfm}//
-T1FONTS = $TEXMF/fonts/{data,type1,pfb}//;$TEXMF/fonts/misc/hbf//;$OSFONTDIR;
-AFMFONTS = $TEXMF/fonts/{data,afm}//;$OSFONTDIR;
+T1FONTS = $TEXMF/fonts/{data,type1,pfb}//;$TEXMF/fonts/misc/hbf//;$OSFONTDIR
+AFMFONTS = $TEXMF/fonts/{data,afm}//;$OSFONTDIR
LIGFONTS = $TEXMF/fonts/lig//
-TTFONTS = $TEXMF/fonts/{data,truetype,ttf}//;$OSFONTDIR;
+TTFONTS = $TEXMF/fonts/{data,truetype,ttf}//;$OSFONTDIR
TTF2TFMINPUTS = $TEXMF/ttf2pk//
T42FONTS = $TEXMF/fonts/type42//
MISCFONTS = $TEXMF/fonts/misc//
@@ -77,61 +78,36 @@ FONTFEATURES = $TEXMF/fonts/fea//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS
FONTCIDMAPS = $TEXMF/fonts/cid//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS
OFMFONTS = $TEXMF/fonts/{data,ofm,tfm}//
-OPLFONTS = $TEXMF/fonts/opl//;
+OPLFONTS = $TEXMF/fonts/{data,opl}//
OVFFONTS = $TEXMF/fonts/{data,ovf,vf}//
-OVPFONTS = $TEXMF/fonts/ovp//;
+OVPFONTS = $TEXMF/fonts/{data,ovp}//
OTPINPUTS = $TEXMF/omega/otp//
OCPINPUTS = $TEXMF/omega/ocp//
OTFFONTS = $TEXMF/fonts/otf/{data,xetex,}//;$OSFONTDIR
% configurations
-% resource paths, can be used in paranoid situations (can be env vars)
-
-TXRESOURCES=unset
-MPRESOURCES=$TXRESOURCES
-MFRESOURCES=$MPRESOURCES
-
-% some extra paths for development trees (can be env vars)
-
-CTXDEVTXPATH=unset
-CTXDEVMPPATH=unset
-CTXDEVMFPATH=unset
-
-TEXINPUTS = .;{$TXRESOURCES}//;{$CTXDEVTXPATH};$TEXMF/tex/{$progname,plain,generic,}//
-TEXINPUTS.context = .;{$TXRESOURCES}//;{$CTXDEVTXPATH};$TEXMF/tex/{context,plain/base,generic}//
-MPINPUTS = .;{$MFRESOURCES}//;{$CTXDEVMPPATH};$TEXMF/metapost/{context,base,}//
-MFINPUTS = .;{$MPRESOURCES}//;{$CTXDEVMFPATH};$TEXMF/metafont/{context,base,}//;$TEXMF/fonts/source//
-BSTINPUTS = .;{$TXRESOURCES}//;{$CTXDEVTXPATH};$TEXMF/bibtex/bst//
+TEXINPUTS = .;$TEXMF/tex/{$progname,plain,generic,}//
+TEXINPUTS.context = .;$TEXMF/tex/{context,plain/base,generic}//
+MPINPUTS = .;$TEXMF/metapost/{context,base,}//
+MFINPUTS = .;$TEXMF/metafont/{context,base,}//;$TEXMF/fonts/source//
+BSTINPUTS = .;$TEXMF/bibtex/bst//
+BIBINPUTS = .;$TEXMF/bibtex/bib//
TEXCONFIG = $TEXMF/{fonts/map,dvips,pdftex,dvipdfmx,dvipdfm}//
PDFTEXCONFIG = $TEXMF/pdftex/{$progname,}//
DVIPDFMINPUTS = $TEXMF/{fonts/{map,enc,lig}/dvipdfm,fonts/type1,dvips,pdftex,dvipdfmx,dvipdfm}//
-% this way we can hook in development paths
-
-CTXDEVPLPATH=unset
-CTXDEVPYPATH=unset
-CTXDEVRBPATH=unset
-CTXDEVJVPATH=unset
-
% some old paths; we restrict the search to context paths; new ones as well as old ones
-PERLINPUTS = .;$CTXDEVPLPATH;$TEXMF/scripts/context/perl
-PYTHONINPUTS = .;$CTXDEVPYPATH;$TEXMF/scripts/context/python
-RUBYINPUTS = .;$CTXDEVRBPATH;$TEXMF/scripts/context/ruby
-% LUAINPUTS = .;$CTXDEVLUPATH;$TEXMF/scripts/context/lua
-JAVAINPUTS = .;$CTXDEVJVPATH;$TEXMF/scripts/context/java
-
-% LUAINPUTS = .;$TEXINPUTS;$TEXMFSCRIPTS
-LUAINPUTS = .;$CTXDEVLUPATH;$TEXINPUTS;$TEXMF/scripts/context/lua//
-TEXMFSCRIPTS = .;$CTXDEVLUPATH;$TEXINPUTS;$CTXDEVRBPATH;$CTXDEVPLPATH;$TEXMF/scripts/context/{lua,ruby,perl}//
+PERLINPUTS = .;$TEXMF/scripts/context/perl
+PYTHONINPUTS = .;$TEXMF/scripts/context/python
+RUBYINPUTS = .;$TEXMF/scripts/context/ruby
+% LUAINPUTS = .;$TEXMF/scripts/context/lua
+JAVAINPUTS = .;$TEXMF/scripts/context/java
-% RUBYINPUTS = .;$CTXDEVPLPATH;$TEXMF/scripts/{$progname,$engine,}/ruby
-% LUAINPUTS = .;$CTXDEVPYPATH;$TEXMF/scripts/{$progname,$engine,}/lua
-% PYTHONINPUTS = .;$CTXDEVRBPATH;$TEXMF/scripts/{$progname,$engine,}/python
-% PERLINPUTS = .;$CTXDEVJVPATH;$TEXMF/scripts/{$progname,$engine,}/perl
-% JAVAINPUTS = .;$CTXDEVJVPATH;$TEXMF/scripts/{$progname,$engine,}/java
+LUAINPUTS = .;$TEXINPUTS;$TEXMF/scripts/context/lua//
+TEXMFSCRIPTS = .;$TEXINPUTS;$TEXMF/scripts/context/{lua,ruby,perl}//
CLUAINPUTS = .;$SELFAUTOLOC/lib/{$progname,$engine,}/lua//
@@ -244,9 +220,9 @@ max_print_line.metafun = 255
extra_mem_top.mptopdf = 1000000
extra_mem_bot.mptopdf = 1000000
-% ocp_buf_size = 500000
-% ocp_stack_size = 10000
-% ocp_list_size = 1000
+% ocp_buf_size = 500000
+% ocp_stack_size = 10000
+% ocp_list_size = 1000
ocp_buf_size = 1
ocp_stack_size = 1
@@ -255,5 +231,5 @@ ocp_list_size = 1
% Just for xetex:
FONTCONFIG_FILE = fonts.conf
-FONTCONFIG_PATH = $TEXMFLOCAL/fonts/conf
-FC_CACHEDIR = $TEXMFLOCAL/fonts/cache
+FONTCONFIG_PATH = $TEXMFSYSTEM/fonts/conf
+FC_CACHEDIR = $TEXMFSYSTEM/fonts/cache
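
The net effect of the context.cnf changes is a new tree layout: TEXMFOS, TEXMFSYSTEM and TEXMFCONTEXT are introduced, TEXMFEXTRA is dropped, HOMETEXMF points at $HOME/texmf again, VARTEXFONTS moves under $VARTEXMF, and the fontconfig paths move from TEXMFLOCAL to TEXMFSYSTEM. In the TEXMF list the '!!' prefix marks trees that are only consulted through their file database, while the unprefixed $TEXMFHOME may also be scanned on disk; the expansion can be inspected with luatools --expand-var TEXMF. A small stand-alone Lua illustration of the resulting search order (not resolver code) is:

    -- toy illustration of the new TEXMF list; '!!' marks database-only trees
    local texmf = {
        "$TEXMFHOME", "!!$TEXMFPROJECT", "!!$TEXMFFONTS", "!!$TEXMFLOCAL",
        "!!$TEXMFCONTEXT", "!!$TEXMFSYSTEM", "!!$TEXMFMAIN",
    }
    for i, tree in ipairs(texmf) do
        local dbonly = tree:find("^!!") ~= nil
        print(i, (tree:gsub("^!!","")), dbonly and "database only" or "disk search allowed")
    end
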
diff --git a/web2c/contextcnf.lua b/web2c/contextcnf.lua
index 166a7504e..de192ec0a 100644
--- a/web2c/contextcnf.lua
+++ b/web2c/contextcnf.lua
@@ -1,38 +1,135 @@
--- filename : texmfcnf.lua
--- comment : companion to luatex/mkiv
--- authors : Hans Hagen & Taco Hoekwater
--- copyright: not relevant
--- license : not relevant
-
--- This file is read by luatools, mtxrun and context mkiv. This is still
--- somewhat experimental and eventually we will support booleans instead
--- of the 't' strings. The content is similar to that of texmf.cnf. Both
--- namespace strings
---
--- TEXINPUT.context = "..."
---
--- and subtables (
---
--- context = { TEXINPUT = ".." }
---
--- are supported, with the latter being the way to go. You can test settings
--- with:
---
--- luatools --expand-var TEXMFBOGUS
---
--- which should return
---
--- It works!
---
--- We first read the lua configuration file(s) and then do a first variable
--- expansion pass. Next we read the regular cnf files. These are cached
--- in the mkiv cache for faster loading. The lua configuration files are
--- not cached.
-
return {
--- LUACSTRIP = 'f', -- don't strip luc files (only use this for debugging, otherwise slower loading and bigger cache)
--- CACHEINTDS = 't', -- keep filedatabase and configuration in tds tree
--- PURGECACHE = 't', -- this saves disk space
- TEXMFCACHE = 'c:/temp', -- installers can change this
--- TEXMFBOGUS = 'It works!', -- a test string
+
+ type = "configuration",
+ version = "1.0.2",
+ date = "2010-06-07",
+ time = "14:49:00",
+ comment = "ConTeXt MkIV configuration file",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+
+ content = {
+
+ -- LUACSTRIP = 'f',
+ -- PURGECACHE = 't',
+
+ TEXMFCACHE = "$SELFAUTOPARENT/texmf-cache",
+
+ TEXMFOS = "$SELFAUTODIR",
+ TEXMFSYSTEM = "$SELFAUTOPARENT/texmf-$SELFAUTOSYSTEM",
+ TEXMFMAIN = "$SELFAUTOPARENT/texmf",
+ TEXMFCONTEXT = "$SELFAUTOPARENT/texmf-context",
+ TEXMFLOCAL = "$SELFAUTOPARENT/texmf-local",
+ TEXMFFONTS = "$SELFAUTOPARENT/texmf-fonts",
+ TEXMFPROJECT = "$SELFAUTOPARENT/texmf-project",
+
+ -- I don't like this plain 'texmf' under home; 'texmf-home' would make more
+ -- sense. One never knows what installers put under texmf anywhere, and
+ -- sorting out problems will be a pain.
+
+ TEXMFHOME = "$HOME/texmf", -- "tree:///$HOME/texmf"
+
+ -- We need texmfos for a few rare files, but as I have a few more bin trees,
+ -- a hack is needed. Maybe other users also have texmf-platform-new trees.
+
+ TEXMF = "{$TEXMFHOME,!!$TEXMFPROJECT,!!$TEXMFFONTS,!!$TEXMFLOCAL,!!$TEXMFCONTEXT,!!$TEXMFSYSTEM,!!$TEXMFMAIN}",
+
+ TEXFONTMAPS = ".;$TEXMF/fonts/{data,map}/{pdftex,dvips}//",
+ ENCFONTS = ".;$TEXMF/fonts/{data,enc}/{dvips,pdftex}//",
+ VFFONTS = ".;$TEXMF/fonts/{data,vf}//",
+ TFMFONTS = ".;$TEXMF/fonts/{data,tfm}//",
+ T1FONTS = ".;$TEXMF/fonts/{data,type1,pfb}//;$OSFONTDIR",
+ AFMFONTS = ".;$TEXMF/fonts/{data,afm}//;$OSFONTDIR",
+ TTFONTS = ".;$TEXMF/fonts/{data,truetype,ttf}//;$OSFONTDIR",
+ OPENTYPEFONTS = ".;$TEXMF/fonts/{data,opentype}//;$OSFONTDIR",
+ CMAPFONTS = ".;$TEXMF/fonts/cmap//",
+ FONTFEATURES = ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS",
+ FONTCIDMAPS = ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS",
+ OFMFONTS = ".;$TEXMF/fonts/{data,ofm,tfm}//",
+ OVFFONTS = ".;$TEXMF/fonts/{data,ovf,vf}//",
+
+ TEXINPUTS = ".;$TEXMF/tex/{context,plain/base,generic}//",
+ MPINPUTS = ".;$TEXMF/metapost/{context,base,}//",
+
+ -- In the next variable the inputs path ($TEXINPUTS) will go away.
+
+ TEXMFSCRIPTS = ".;$TEXMF/scripts/context/{lua,ruby,python,perl}//;$TEXINPUTS",
+ PERLINPUTS = ".;$TEXMF/scripts/context/perl",
+ PYTHONINPUTS = ".;$TEXMF/scripts/context/python",
+ RUBYINPUTS = ".;$TEXMF/scripts/context/ruby",
+ LUAINPUTS = ".;$TEXINPUTS;$TEXMF/scripts/context/lua//",
+ CLUAINPUTS = ".;$SELFAUTOLOC/lib/{$progname,$engine,}/lua//",
+
+ -- Not really used by MkIV so they might go away.
+
+ BIBINPUTS = ".;$TEXMF/bibtex/bib//",
+ BSTINPUTS = ".;$TEXMF/bibtex/bst//",
+
+ -- Sort of obsolete.
+
+ OTPINPUTS = ".;$TEXMF/omega/otp//",
+ OCPINPUTS = ".;$TEXMF/omega/ocp//",
+
+ -- A few special ones that will change some day.
+
+ FONTCONFIG_FILE = "fonts.conf",
+ FONTCONFIG_PATH = "$TEXMFSYSTEM/fonts/conf",
+ FC_CACHEDIR = "$TEXMFSYSTEM/fonts/cache", -- not needed
+
+ -- Some of the following parameters will disappear. Also, some are
+ -- not used at all, as we disable the ocp mechanism. At some point
+ -- it will make more sense to turn them into directives.
+
+ context = {
+
+ hash_extra = "100000",
+ nest_size = "500",
+ param_size = "10000",
+ save_size = "50000",
+ stack_size = "10000",
+ expand_depth = "10000",
+ max_print_line = "10000",
+ max_in_open = "256",
+
+ ocp_stack_size = "10000",
+ ocp_list_size = "1000",
+
+ buf_size = "4000000", -- obsolete
+ ocp_buf_size = "500000", -- obsolete
+
+ },
+
+ -- We have a few reserved subtables that control runtime behaviour. The
+ -- keys have names like 'foo.bar', which would force you to write keys
+ -- like ['foo.bar'], so for convenience we also support 'foo_bar'.
+
+ directives = {
+ -- system_checkglobals = "10",
+ -- system.nostatistics = "yes",
+ system_errorcontext = "10",
+ },
+
+ experiments = {
+
+ },
+
+ trackers = {
+
+ },
+
+ -- The io modes are similar to the traditional ones. Possible values
+ -- are all, paranoid and restricted.
+
+ output_mode = "restricted",
+ input_mode = "any",
+
+ -- The following variable is under consideration. We do have protection
+ -- mechanisms, but they are not enabled by default.
+
+ command_mode = "any", -- any none list
+ command_list = "mtxrun, convert, inkscape, gs, imagemagick, curl, bibtex, pstoedit",
+
+ },
+
+ TEXMFCACHE = "$SELFAUTOPARENT/texmf-cache", -- for old times' sake
+
}
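
The rewritten contextcnf.lua is no longer a flat list of variables but a small configuration database: metadata at the top level, the actual settings in a content subtable, and reserved subtables (context, directives, experiments, trackers) for runtime behaviour. A minimal sketch of how a file in this format could be consumed, including the 'foo_bar' to 'foo.bar' key convenience mentioned in the comments, might look as follows (an illustration only, not the MkIV resolver code; the file name is hypothetical):

    local chunk = loadfile("contextcnf.lua") -- path is illustrative
    local data  = chunk and chunk()
    if type(data) == "table" and data.type == "configuration" then
        local content = data.content or { }
        -- normalize convenience keys: 'system_errorcontext' -> 'system.errorcontext'
        local directives = { }
        for key, value in pairs(content.directives or { }) do
            directives[(key:gsub("_","."))] = value
        end
        -- plain string variables would be fed into the resolver; the
        -- reserved subtables are handled separately
        for key, value in pairs(content) do
            if type(value) == "string" then
                print(key,value)
            end
        end
    end
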