summaryrefslogtreecommitdiff
path: root/tex
diff options
context:
space:
mode:
authorHans Hagen <pragma@wxs.nl>2010-09-03 11:05:00 +0200
committerHans Hagen <pragma@wxs.nl>2010-09-03 11:05:00 +0200
commit0da1a7a94f55a5dc0d318f399eb843303d5b62f6 (patch)
treeb5493406b2f6d8954204b532df5549acbc4baee8 /tex
parentbbc8970958af29626335568414a4278d852e086f (diff)
downloadcontext-0da1a7a94f55a5dc0d318f399eb843303d5b62f6.tar.gz
beta 2010.09.03 11:05
Diffstat (limited to 'tex')
-rw-r--r--tex/context/base/anch-pos.lua21
-rw-r--r--tex/context/base/attr-col.lua17
-rw-r--r--tex/context/base/attr-eff.lua9
-rw-r--r--tex/context/base/attr-lay.lua6
-rw-r--r--tex/context/base/back-ini.lua6
-rw-r--r--tex/context/base/back-pdf.lua2
-rw-r--r--tex/context/base/back-pdf.mkiv2
-rw-r--r--tex/context/base/bibl-bib.lua4
-rw-r--r--tex/context/base/bibl-tra.lua2
-rw-r--r--tex/context/base/bibl-tra.mkii2
-rw-r--r--tex/context/base/bibl-tra.mkiv2
-rw-r--r--tex/context/base/buff-ini.lua15
-rw-r--r--tex/context/base/buff-ver.mkiv2
-rw-r--r--tex/context/base/catc-ctx.tex208
-rw-r--r--tex/context/base/catc-xml.tex137
-rw-r--r--tex/context/base/char-cmp.lua16
-rw-r--r--tex/context/base/char-enc.lua4
-rw-r--r--tex/context/base/char-ini.lua338
-rw-r--r--tex/context/base/char-ini.mkiv15
-rw-r--r--tex/context/base/char-tex.lua6
-rw-r--r--tex/context/base/char-utf.lua27
-rw-r--r--tex/context/base/char-utf.mkiv2
-rw-r--r--tex/context/base/colo-icc.lua2
-rw-r--r--tex/context/base/cont-new.tex2
-rw-r--r--tex/context/base/context.mkii1
-rw-r--r--tex/context/base/context.mkiv6
-rw-r--r--tex/context/base/context.tex2
-rw-r--r--tex/context/base/core-con.lua25
-rw-r--r--tex/context/base/core-ini.mkiv7
-rw-r--r--tex/context/base/core-job.lua10
-rw-r--r--tex/context/base/core-job.mkiv10
-rw-r--r--tex/context/base/core-mis.mkiv17
-rw-r--r--tex/context/base/core-two.lua15
-rw-r--r--tex/context/base/core-uti.lua44
-rw-r--r--tex/context/base/core-var.mkiv17
-rw-r--r--tex/context/base/data-aux.lua8
-rw-r--r--tex/context/base/data-con.lua3
-rw-r--r--tex/context/base/data-ctx.lua4
-rw-r--r--tex/context/base/data-env.lua30
-rw-r--r--tex/context/base/data-exp.lua24
-rw-r--r--tex/context/base/data-ini.lua10
-rw-r--r--tex/context/base/data-inp.lua12
-rw-r--r--tex/context/base/data-lua.lua10
-rw-r--r--tex/context/base/data-met.lua9
-rw-r--r--tex/context/base/data-pre.lua16
-rw-r--r--tex/context/base/data-res.lua258
-rw-r--r--tex/context/base/data-tex.lua10
-rw-r--r--tex/context/base/data-tmp.lua16
-rw-r--r--tex/context/base/data-tre.lua2
-rw-r--r--tex/context/base/data-use.lua6
-rw-r--r--tex/context/base/data-zip.lua14
-rw-r--r--tex/context/base/font-afm.lua86
-rw-r--r--tex/context/base/font-agl.lua11
-rw-r--r--tex/context/base/font-chk.lua8
-rw-r--r--tex/context/base/font-cid.lua2
-rw-r--r--tex/context/base/font-clr.lua7
-rw-r--r--tex/context/base/font-ctx.lua137
-rw-r--r--tex/context/base/font-def.lua201
-rw-r--r--tex/context/base/font-dum.lua25
-rw-r--r--tex/context/base/font-enc.lua14
-rw-r--r--tex/context/base/font-ext.lua24
-rw-r--r--tex/context/base/font-fbk.lua6
-rw-r--r--tex/context/base/font-gds.lua16
-rw-r--r--tex/context/base/font-ini.lua21
-rw-r--r--tex/context/base/font-ini.mkiv14
-rw-r--r--tex/context/base/font-map.lua26
-rw-r--r--tex/context/base/font-mis.lua2
-rw-r--r--tex/context/base/font-ota.lua24
-rw-r--r--tex/context/base/font-otb.lua4
-rw-r--r--tex/context/base/font-otd.lua31
-rw-r--r--tex/context/base/font-otf.lua124
-rw-r--r--tex/context/base/font-oth.lua4
-rw-r--r--tex/context/base/font-oti.lua7
-rw-r--r--tex/context/base/font-otn.lua66
-rw-r--r--tex/context/base/font-ott.lua48
-rw-r--r--tex/context/base/font-syn.lua224
-rw-r--r--tex/context/base/font-tfm.lua133
-rw-r--r--tex/context/base/font-vf.lua37
-rw-r--r--tex/context/base/font-xtx.lua12
-rw-r--r--tex/context/base/grph-fig.mkiv12
-rw-r--r--tex/context/base/grph-fil.lua41
-rw-r--r--tex/context/base/grph-inc.lua58
-rw-r--r--tex/context/base/grph-swf.lua1
-rw-r--r--tex/context/base/grph-u3d.lua1
-rw-r--r--tex/context/base/java-ini.lua11
-rw-r--r--tex/context/base/l-file.lua16
-rw-r--r--tex/context/base/l-io.lua10
-rw-r--r--tex/context/base/l-lpeg.lua6
-rw-r--r--tex/context/base/lang-ini.lua10
-rw-r--r--tex/context/base/lang-ini.mkiv5
-rw-r--r--tex/context/base/lang-url.lua2
-rw-r--r--tex/context/base/lang-wrd.lua26
-rw-r--r--tex/context/base/lang-wrd.mkiv4
-rw-r--r--tex/context/base/lpdf-ano.lua182
-rw-r--r--tex/context/base/lpdf-fld.lua48
-rw-r--r--tex/context/base/lpdf-fmt.lua (renamed from tex/context/base/lpdf-pdx.lua)126
-rw-r--r--tex/context/base/lpdf-ini.lua48
-rw-r--r--tex/context/base/lpdf-mis.lua1
-rw-r--r--tex/context/base/lpdf-pda.xml171
-rw-r--r--tex/context/base/lpdf-pdx.xml (renamed from tex/context/base/lpdf-xmp.xml)2
-rw-r--r--tex/context/base/lpdf-ren.lua28
-rw-r--r--tex/context/base/lpdf-tag.lua137
-rw-r--r--tex/context/base/lpdf-wid.lua11
-rw-r--r--tex/context/base/lpdf-xmp.lua26
-rw-r--r--tex/context/base/luat-cbk.lua2
-rw-r--r--tex/context/base/luat-cnf.lua9
-rw-r--r--tex/context/base/luat-cod.lua3
-rw-r--r--tex/context/base/luat-dum.lua8
-rw-r--r--tex/context/base/luat-env.lua20
-rw-r--r--tex/context/base/luat-fmt.lua12
-rw-r--r--tex/context/base/luat-ini.lua6
-rw-r--r--tex/context/base/luat-iop.lua8
-rw-r--r--tex/context/base/luat-lib.mkiv1
-rw-r--r--tex/context/base/luat-sto.lua25
-rw-r--r--tex/context/base/lxml-aux.lua158
-rw-r--r--tex/context/base/lxml-ini.mkiv18
-rw-r--r--tex/context/base/lxml-lpt.lua74
-rw-r--r--tex/context/base/lxml-tab.lua3
-rw-r--r--tex/context/base/lxml-tex.lua86
-rw-r--r--tex/context/base/m-punk.mkiv10
-rw-r--r--tex/context/base/math-ent.lua2
-rw-r--r--tex/context/base/math-ini.lua6
-rw-r--r--tex/context/base/math-ini.mkiv4
-rw-r--r--tex/context/base/math-map.lua4
-rw-r--r--tex/context/base/math-noa.lua2
-rw-r--r--tex/context/base/math-vfu.lua76
-rw-r--r--tex/context/base/meta-ini.mkiv8
-rw-r--r--tex/context/base/meta-pdf.lua11
-rw-r--r--tex/context/base/meta-pdh.lua12
-rw-r--r--tex/context/base/mlib-ctx.lua4
-rw-r--r--tex/context/base/mlib-pdf.lua5
-rw-r--r--tex/context/base/mlib-pps.lua150
-rw-r--r--tex/context/base/mlib-pps.mkiv23
-rw-r--r--tex/context/base/mlib-run.lua2
-rw-r--r--tex/context/base/mult-chk.lua4
-rw-r--r--tex/context/base/mult-cld.lua59
-rw-r--r--tex/context/base/node-dir.lua32
-rw-r--r--tex/context/base/node-dum.lua2
-rw-r--r--tex/context/base/node-ini.lua36
-rw-r--r--tex/context/base/node-inj.lua19
-rw-r--r--tex/context/base/node-ref.lua65
-rw-r--r--tex/context/base/node-rul.lua14
-rw-r--r--tex/context/base/node-ser.lua28
-rw-r--r--tex/context/base/node-spl.lua15
-rw-r--r--tex/context/base/node-tra.lua79
-rw-r--r--tex/context/base/node-tsk.lua4
-rw-r--r--tex/context/base/node-tst.lua4
-rw-r--r--tex/context/base/pack-obj.lua14
-rw-r--r--tex/context/base/page-imp.mkiv38
-rw-r--r--tex/context/base/page-lin.lua6
-rw-r--r--tex/context/base/page-mak.mkii4
-rw-r--r--tex/context/base/page-mak.mkiv4
-rw-r--r--tex/context/base/page-mar.mkiv16
-rw-r--r--tex/context/base/regi-ini.lua4
-rw-r--r--tex/context/base/scrn-men.mkiv132
-rw-r--r--tex/context/base/scrp-ini.lua32
-rw-r--r--tex/context/base/sort-ini.lua8
-rw-r--r--tex/context/base/sort-lan.lua2577
-rw-r--r--tex/context/base/spac-ali.mkiv1
-rw-r--r--tex/context/base/spac-ver.lua154
-rw-r--r--tex/context/base/spac-ver.mkiv19
-rw-r--r--tex/context/base/strc-bkm.lua2
-rw-r--r--tex/context/base/strc-blk.lua18
-rw-r--r--tex/context/base/strc-def.mkiv25
-rw-r--r--tex/context/base/strc-des.mkiv3
-rw-r--r--tex/context/base/strc-doc.lua284
-rw-r--r--tex/context/base/strc-flt.mkiv2
-rw-r--r--tex/context/base/strc-ini.lua53
-rw-r--r--tex/context/base/strc-itm.lua7
-rw-r--r--tex/context/base/strc-itm.mkiv5
-rw-r--r--tex/context/base/strc-lst.lua57
-rw-r--r--tex/context/base/strc-lst.mkiv4
-rw-r--r--tex/context/base/strc-mar.lua2
-rw-r--r--tex/context/base/strc-mat.mkiv31
-rw-r--r--tex/context/base/strc-not.lua13
-rw-r--r--tex/context/base/strc-not.mkiv5
-rw-r--r--tex/context/base/strc-num.lua144
-rw-r--r--tex/context/base/strc-pag.lua109
-rw-r--r--tex/context/base/strc-pag.mkiv6
-rw-r--r--tex/context/base/strc-ref.lua161
-rw-r--r--tex/context/base/strc-ref.mkiv16
-rw-r--r--tex/context/base/strc-reg.lua70
-rw-r--r--tex/context/base/strc-reg.mkiv38
-rw-r--r--tex/context/base/strc-ren.mkiv5
-rw-r--r--tex/context/base/strc-sec.mkiv8
-rw-r--r--tex/context/base/strc-syn.lua18
-rw-r--r--tex/context/base/strc-tag.lua152
-rw-r--r--tex/context/base/strc-tag.mkiv28
-rw-r--r--tex/context/base/strc-xml.mkiv8
-rw-r--r--tex/context/base/supp-box.tex2
-rw-r--r--tex/context/base/supp-fil.lua2
-rw-r--r--tex/context/base/supp-fil.mkiv12
-rw-r--r--tex/context/base/supp-mat.mkiv2
-rw-r--r--tex/context/base/syst-con.lua1
-rw-r--r--tex/context/base/tabl-ltb.mkiv11
-rw-r--r--tex/context/base/tabl-ntb.mkiv17
-rw-r--r--tex/context/base/tabl-tbl.mkiv8
-rw-r--r--tex/context/base/task-ini.lua6
-rw-r--r--tex/context/base/toks-ini.lua12
-rw-r--r--tex/context/base/trac-deb.lua4
-rw-r--r--tex/context/base/trac-lmx.lua8
-rw-r--r--tex/context/base/trac-set.lua3
-rw-r--r--tex/context/base/trac-tex.lua2
-rw-r--r--tex/context/base/typo-dig.lua3
-rw-r--r--tex/context/base/util-lua.lua2
-rw-r--r--tex/context/base/util-sto.lua90
-rw-r--r--tex/context/base/x-calcmath.mkiv9
-rw-r--r--tex/context/base/x-chemml.mkiv40
-rw-r--r--tex/context/base/x-mathml.mkiv166
-rw-r--r--tex/context/base/x-pending.mkiv2
-rw-r--r--tex/context/base/x-xtag.mkiv4
-rw-r--r--tex/context/base/xtag-ini.tex22
-rw-r--r--tex/context/interface/cont-cs.xml6
-rw-r--r--tex/context/interface/cont-de.xml6
-rw-r--r--tex/context/interface/cont-en.xml6
-rw-r--r--tex/context/interface/cont-fr.xml6
-rw-r--r--tex/context/interface/cont-it.xml6
-rw-r--r--tex/context/interface/cont-nl.xml6
-rw-r--r--tex/context/interface/cont-pe.xml6
-rw-r--r--tex/context/interface/cont-ro.xml6
-rw-r--r--tex/context/sample/douglas.tex26
-rw-r--r--tex/context/test/pdf-x-common.mkiv8
-rw-r--r--tex/generic/context/luatex-fonts-merged.lua790
-rw-r--r--tex/generic/context/luatex-fonts.lua2
224 files changed, 7017 insertions, 3411 deletions
diff --git a/tex/context/base/anch-pos.lua b/tex/context/base/anch-pos.lua
index b83550922..28d32089d 100644
--- a/tex/context/base/anch-pos.lua
+++ b/tex/context/base/anch-pos.lua
@@ -15,22 +15,24 @@ more efficient.</p>
local concat, format = table.concat, string.format
local texprint, ctxcatcodes = tex.print, tex.ctxcatcodes
local lpegmatch = lpeg.match
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+
+local collected, tobesaved = allocate(), allocate()
local jobpositions = {
- collected = { },
- tobesaved = { },
+ collected = collected,
+ tobesaved = tobesaved,
}
job.positions = jobpositions
-local tobesaved, collected = jobpositions.tobesaved, jobpositions.collected
-
_plib_, _ptbs_, _pcol_ = jobpositions, tobesaved, collected -- global
local dx, dy = "0pt", "0pt"
local function initializer()
- tobesaved, collected = jobpositions.tobesaved, jobpositions.collected -- local
+ tobesaved = mark(jobpositions.tobesaved)
+ collected = mark(jobpositions.collected)
_ptbs_, _pcol_ = tobesaved, collected -- global
local p = collected["page:0"] -- page:1
if p then
@@ -39,18 +41,18 @@ local function initializer()
end
end
-job.register('job.positions.collected', jobpositions.tobesaved, initializer)
+job.register('job.positions.collected', tobesaved, initializer)
function jobpositions.copy(target,source)
- jobpositions.collected[target] = jobpositions.collected[source] or tobesaved[source]
+ collected[target] = collected[source] or tobesaved[source]
end
function jobpositions.replace(name,...)
- jobpositions.collected[name] = {...}
+ collected[name] = {...}
end
function jobpositions.doifelse(name)
- commands.testcase(jobpositions.collected[name] or tobesaved[name])
+ commands.testcase(collected[name] or tobesaved[name])
end
function jobpositions.MPp(id) local jpi = collected[id] or tobesaved[id] texprint(ctxcatcodes,(jpi and jpi[1]) or '0' ) end
@@ -60,7 +62,6 @@ function jobpositions.MPw(id) local jpi = collected[id] or tobesaved[id] texprin
function jobpositions.MPh(id) local jpi = collected[id] or tobesaved[id] texprint(ctxcatcodes,(jpi and jpi[5]) or '0pt') end
function jobpositions.MPd(id) local jpi = collected[id] or tobesaved[id] texprint(ctxcatcodes,(jpi and jpi[6]) or '0pt') end
-
function jobpositions.MPx(id)
local jpi = collected[id] or tobesaved[id]
local x = jpi and jpi[2]
diff --git a/tex/context/base/attr-col.lua b/tex/context/base/attr-col.lua
index 1ff1a07ed..28e02edd6 100644
--- a/tex/context/base/attr-col.lua
+++ b/tex/context/base/attr-col.lua
@@ -13,6 +13,8 @@ local type = type
local format = string.format
local concat = table.concat
+local allocate = utilities.storage.allocate
+
local report_attributes = logs.new("attributes")
local report_colors = logs.new("colors")
local report_transparencies = logs.new("transparencies")
@@ -63,8 +65,9 @@ local unsetvalue = attributes.unsetvalue
attributes.colors = attributes.colors or { }
local colors = attributes.colors _clib_ = colors -- fast access (less tokens too)
-colors.data = colors.data or { }
-colors.values = colors.values or { }
+
+colors.data = allocate()
+colors.values = colors.values or { }
colors.registered = colors.registered or { }
colors.weightgray = true
@@ -346,8 +349,8 @@ end
attributes.transparencies = attributes.transparencies or { }
local transparencies = attributes.transparencies _tlib_ = transparencies -- fast access (less tokens too)
transparencies.registered = transparencies.registered or { }
-transparencies.data = transparencies.data or { }
-transparencies.values = transparencies.values or { }
+transparencies.data = allocate()
+transparencies.values = transparencies.values or { }
transparencies.triggering = true
transparencies.attribute = attributes.private('transparency')
transparencies.supported = true
@@ -448,12 +451,12 @@ end
--- colorintents: overprint / knockout
-attributes.colorintents = attributes.colorintents or {}
+attributes.colorintents = attributes.colorintents or { }
local colorintents = attributes.colorintents
-colorintents.data = colorintents.data or { }
+colorintents.data = allocate() -- colorintents.data or { }
colorintents.attribute = attributes.private('colorintent')
-colorintents.registered = {
+colorintents.registered = allocate {
overprint = 1,
knockout = 2,
}
diff --git a/tex/context/base/attr-eff.lua b/tex/context/base/attr-eff.lua
index 4759ad1fa..53610cbff 100644
--- a/tex/context/base/attr-eff.lua
+++ b/tex/context/base/attr-eff.lua
@@ -8,6 +8,8 @@ if not modules then modules = { } end modules ['attr-eff'] = {
local format = string.format
+local allocate = utilities.storage.allocate
+
local attributes, nodes = attributes, nodes
local states = attributes.states
@@ -16,15 +18,16 @@ local nodeinjections = backends.nodeinjections
attributes.effects = attributes.effects or { }
local effects = attributes.effects
-effects.data = effects.data or { }
+effects.data = allocate()
effects.values = effects.values or { }
effects.registered = effects.registered or { }
-effects.stamp = "%s:%s:%s"
effects.attribute = attributes.private("effect")
storage.register("attributes/effects/registered", effects.registered, "attributes.effects.registered")
storage.register("attributes/effects/values", effects.values, "attributes.effects.values")
+local template = "%s:%s:%s"
+
local data, registered, values = effects.data, effects.registered, effects.values
-- valid effects: normal inner outer both hidden (stretch,rulethickness,effect)
@@ -50,7 +53,7 @@ setmetatable(effects, { __index = extender })
setmetatable(effects.data, { __index = reviver })
function effects.register(effect,stretch,rulethickness)
- local stamp = format(effects.stamp,effect,stretch,rulethickness)
+ local stamp = format(template,effect,stretch,rulethickness)
local n = registered[stamp]
if not n then
n = #values + 1
diff --git a/tex/context/base/attr-lay.lua b/tex/context/base/attr-lay.lua
index dd74698ef..df7a4e6bb 100644
--- a/tex/context/base/attr-lay.lua
+++ b/tex/context/base/attr-lay.lua
@@ -13,6 +13,8 @@ if not modules then modules = { } end modules ['attr-lay'] = {
local type = type
local format = string.format
+local allocate = utilities.storage.allocate
+
local report_viewerlayers = logs.new("viewerlayers")
-- todo: document this but first reimplement this as it reflects the early
@@ -28,10 +30,10 @@ attributes.viewerlayers = attributes.viewerlayers or { }
local viewerlayers = attributes.viewerlayers
viewerlayers = viewerlayers or { }
-viewerlayers.data = viewerlayers.data or { }
+viewerlayers.data = allocate()
viewerlayers.registered = viewerlayers.registered or { }
viewerlayers.values = viewerlayers.values or { }
-viewerlayers.listwise = viewerlayers.listwise or { }
+viewerlayers.listwise = allocate()
viewerlayers.attribute = attributes.private("viewerlayer")
viewerlayers.supported = true
viewerlayers.hasorder = true
diff --git a/tex/context/base/back-ini.lua b/tex/context/base/back-ini.lua
index b50699d15..dadaaa837 100644
--- a/tex/context/base/back-ini.lua
+++ b/tex/context/base/back-ini.lua
@@ -103,8 +103,6 @@ backends.codeinjections = {
setfigurealternative = nothing,
enabletags = nothing,
- maptag = nothing,
- mapping = nothing, -- returns table
mergereferences = nothing,
mergeviewerlayers = nothing,
@@ -115,7 +113,9 @@ backends.codeinjections = {
-- called in tex
- finalizepage = nothing -- will go when we have a hook at the lua end
+ finalizepage = nothing, -- will go when we have a hook at the lua end
+
+ finishreference = nothing,
}
diff --git a/tex/context/base/back-pdf.lua b/tex/context/base/back-pdf.lua
index ece325729..436685426 100644
--- a/tex/context/base/back-pdf.lua
+++ b/tex/context/base/back-pdf.lua
@@ -114,7 +114,7 @@ function nodeinjections.switchlayer(name)
return copy_node(c)
end
--- code
+-- code, will move to lpdf-*
function nodeinjections.insertmovie(specification)
-- managed in figure inclusion: width, height, factor, repeat, controls, preview, label, foundname
diff --git a/tex/context/base/back-pdf.mkiv b/tex/context/base/back-pdf.mkiv
index 1a901133d..94b93cb39 100644
--- a/tex/context/base/back-pdf.mkiv
+++ b/tex/context/base/back-pdf.mkiv
@@ -26,7 +26,7 @@
\registerctxluafile{lpdf-u3d}{1.001}
\registerctxluafile{lpdf-swf}{1.001}
\registerctxluafile{lpdf-tag}{1.001}
-\registerctxluafile{lpdf-pdx}{1.001}
+\registerctxluafile{lpdf-fmt}{1.001}
\registerctxluafile{lpdf-epd}{1.001}
\registerctxluafile{lpdf-epa}{1.001}
diff --git a/tex/context/base/bibl-bib.lua b/tex/context/base/bibl-bib.lua
index 8de1ac23a..db9fef14d 100644
--- a/tex/context/base/bibl-bib.lua
+++ b/tex/context/base/bibl-bib.lua
@@ -140,7 +140,7 @@ function bibtex.convert(session,content)
end
function bibtex.load(session,filename)
- local filename = resolvers.find_file(filename,"bib")
+ local filename = resolvers.findfile(filename,"bib")
if filename ~= "" then
local data = io.loaddata(filename) or ""
if data == "" then
@@ -295,7 +295,7 @@ end)
--~ print(table.serialize(session.shortcuts))
--~ print(xml.serialize(session.xml))
-if not characters then dofile(resolvers.find_file("char-def.lua")) end
+if not characters then dofile(resolvers.findfile("char-def.lua")) end
local chardata = characters.data
local concat = table.concat
diff --git a/tex/context/base/bibl-tra.lua b/tex/context/base/bibl-tra.lua
index 4c62475d0..60a673546 100644
--- a/tex/context/base/bibl-tra.lua
+++ b/tex/context/base/bibl-tra.lua
@@ -20,7 +20,7 @@ local trace_bibtex = false trackers.register("publications.bibtex", function(v)
local report_publications = logs.new("publications")
-local context, structures, references = context, structures, references
+local context, structures = context, structures
local references = structures.references
local sections = structures.sections
diff --git a/tex/context/base/bibl-tra.mkii b/tex/context/base/bibl-tra.mkii
index 087781db9..2c03a7f91 100644
--- a/tex/context/base/bibl-tra.mkii
+++ b/tex/context/base/bibl-tra.mkii
@@ -506,6 +506,8 @@
{#1\@EA\cite\@EA[\@@pb@crossref]#2}
{#3}}
+\let\insertcrossref\gobblethreearguments
+
\appendtoks\let\insertcrossref\bibinsertcrossref\to\initializebibdefinitions
%D \macros{complexbibdef,specialbibinsert}
diff --git a/tex/context/base/bibl-tra.mkiv b/tex/context/base/bibl-tra.mkiv
index 4ebc817db..656e80217 100644
--- a/tex/context/base/bibl-tra.mkiv
+++ b/tex/context/base/bibl-tra.mkiv
@@ -804,6 +804,8 @@
\def\bibinsertcrossref#1#2#3%
{\bibdoifelse\@@pb@crossref{#1\cite[\@@pb@crossref]#2}{#3}}
+\let\insertcrossref\gobblethreearguments
+
\appendtoks\let\insertcrossref\bibinsertcrossref\to\initializebibdefinitions
%D The next macro is needed because the number command of the
diff --git a/tex/context/base/buff-ini.lua b/tex/context/base/buff-ini.lua
index 2baae18b0..4ae4df699 100644
--- a/tex/context/base/buff-ini.lua
+++ b/tex/context/base/buff-ini.lua
@@ -32,9 +32,10 @@ local ctxcatcodes = tex.ctxcatcodes
local variables = interfaces.variables
local lpegmatch = lpeg.match
local settings_to_array = utilities.parsers.settings_to_array
+local allocate = utilities.storage.allocate
buffers = {
- data = { },
+ data = allocate(),
hooks = { },
flags = { },
commands = { },
@@ -66,7 +67,7 @@ function buffers.append(name, str)
data[name] = (data[name] or "") .. str
end
-buffers.flags.store_as_table = true
+buffers.flags.storeastable = true
-- to be sorted out: crlf + \ ; slow now
@@ -89,7 +90,7 @@ function buffers.grab(name,begintag,endtag,bufferdata)
dn = dn .. "\n" .. sub(bufferdata,1,#bufferdata-1)
end
dn = gsub(dn,"[\010\013]$","")
- if flags.store_as_table then
+ if flags.storeastable then
dn = dn:splitlines()
end
end
@@ -105,8 +106,8 @@ function buffers.doifelsebuffer(name)
commands.testcase(data[name] ~= nil)
end
-flags.optimize_verbatim = true
-flags.count_empty_lines = false
+flags.optimizeverbatim = true
+flags.countemptylines = false
local no_break_command = "\\doverbatimnobreak"
local do_break_command = "\\doverbatimgoodbreak"
@@ -120,7 +121,7 @@ local begin_of_inline_command = "\\doverbatimbeginofinline"
local end_of_inline_command = "\\doverbatimendofinline"
function buffers.verbatimbreak(n,m)
- if flags.optimize_verbatim then
+ if flags.optimizeverbatim then
if n == 2 or n == m then
texsprint(no_break_command)
elseif n > 1 then
@@ -265,7 +266,7 @@ function buffers.typeline(str,n,m,line)
hooks.flush_line(hooks.line(str))
hooks.end_of_line()
else
- if flags.count_empty_lines then
+ if flags.countemptylines then
line = line + 1
end
hooks.empty_line(line)
diff --git a/tex/context/base/buff-ver.mkiv b/tex/context/base/buff-ver.mkiv
index 98430363c..6b80fbb7a 100644
--- a/tex/context/base/buff-ver.mkiv
+++ b/tex/context/base/buff-ver.mkiv
@@ -1186,10 +1186,12 @@
\dokeepverbatimlinedata
\doopenupverbatimline
\the\everyline\strut
+ \dostarttagged\t!verbatimline\empty
}%\beginverbatimline}
\def\doverbatimendofline
{%\endverbatimline
+ \dostoptagged
\global\lineparfalse
\obeyedline\par
\attribute\verbatimlineattribute\attributeunsetvalue}
diff --git a/tex/context/base/catc-ctx.tex b/tex/context/base/catc-ctx.tex
index 21e7d0136..e2c35c42b 100644
--- a/tex/context/base/catc-ctx.tex
+++ b/tex/context/base/catc-ctx.tex
@@ -15,10 +15,11 @@
%D of everywhere around.
\ifdefined \ctxcatcodes \else \newcatcodetable \ctxcatcodes \fi
+\ifdefined \txtcatcodes \else \newcatcodetable \txtcatcodes \fi
\ifdefined \mthcatcodes \else \newcatcodetable \mthcatcodes \fi % math, not used, too tricky
-\ifdefined \xmlcatcodesn \else \newcatcodetable \xmlcatcodesn \fi % normal
-\ifdefined \xmlcatcodese \else \newcatcodetable \xmlcatcodese \fi % entitle
-\ifdefined \xmlcatcodesr \else \newcatcodetable \xmlcatcodesr \fi % reduce
+% \ifdefined \xmlcatcodesn \else \newcatcodetable \xmlcatcodesn \fi % normal
+% \ifdefined \xmlcatcodese \else \newcatcodetable \xmlcatcodese \fi % entitle
+% \ifdefined \xmlcatcodesr \else \newcatcodetable \xmlcatcodesr \fi % reduce
\ifdefined \typcatcodesa \else \newcatcodetable \typcatcodesa \fi % { }
\ifdefined \typcatcodesb \else \newcatcodetable \typcatcodesb \fi % < >
@@ -84,64 +85,64 @@
%\catcode`\| = 13
\stopcatcodetable
-\startcatcodetable \xmlcatcodesn
- \catcode`\^^I = 10 % ascii tab is a blank space
- \catcode`\^^M = 5 % ascii return is end-line
- \catcode`\^^L = 5 % ascii form-feed
- \catcode`\ = 10 % ascii space is blank space
- \catcode`\^^Z = 9 % ascii eof is ignored
- \catcode`\& = 13 % entity
- \catcode`\< = 13 % element
- \catcode`\> = 12
- \catcode`\" = 12 % probably not needed any more
- \catcode`\/ = 12 % probably not needed any more
- \catcode`\' = 12 % probably not needed any more
- \catcode`\~ = 12 % probably not needed any more
- \catcode`\# = 12 % probably not needed any more
- \catcode`\\ = 12 % probably not needed any more
-\stopcatcodetable
+% \startcatcodetable \xmlcatcodesn
+% \catcode`\^^I = 10 % ascii tab is a blank space
+% \catcode`\^^M = 5 % ascii return is end-line
+% \catcode`\^^L = 5 % ascii form-feed
+% \catcode`\ = 10 % ascii space is blank space
+% \catcode`\^^Z = 9 % ascii eof is ignored
+% \catcode`\& = 13 % entity
+% \catcode`\< = 13 % element
+% \catcode`\> = 12
+% \catcode`\" = 12 % probably not needed any more
+% \catcode`\/ = 12 % probably not needed any more
+% \catcode`\' = 12 % probably not needed any more
+% \catcode`\~ = 12 % probably not needed any more
+% \catcode`\# = 12 % probably not needed any more
+% \catcode`\\ = 12 % probably not needed any more
+% \stopcatcodetable
-\startcatcodetable \xmlcatcodese
- \catcode`\^^I = 10 % ascii tab is a blank space
- \catcode`\^^M = 5 % ascii return is end-line
- \catcode`\^^L = 5 % ascii form-feed
- \catcode`\ = 10 % ascii space is blank space
- \catcode`\^^Z = 9 % ascii eof is ignored
- \catcode`\& = 13 % entity
- \catcode`\< = 13 % element
- \catcode`\> = 12
- \catcode`\# = 13
- \catcode`\$ = 13
- \catcode`\% = 13
- \catcode`\\ = 13
- \catcode`\^ = 13
- \catcode`\_ = 13
- \catcode`\{ = 13
- \catcode`\} = 13
- \catcode`\| = 13
- \catcode`\~ = 13
-\stopcatcodetable
+% \startcatcodetable \xmlcatcodese
+% \catcode`\^^I = 10 % ascii tab is a blank space
+% \catcode`\^^M = 5 % ascii return is end-line
+% \catcode`\^^L = 5 % ascii form-feed
+% \catcode`\ = 10 % ascii space is blank space
+% \catcode`\^^Z = 9 % ascii eof is ignored
+% \catcode`\& = 13 % entity
+% \catcode`\< = 13 % element
+% \catcode`\> = 12
+% \catcode`\# = 13
+% \catcode`\$ = 13
+% \catcode`\% = 13
+% \catcode`\\ = 13
+% \catcode`\^ = 13
+% \catcode`\_ = 13
+% \catcode`\{ = 13
+% \catcode`\} = 13
+% \catcode`\| = 13
+% \catcode`\~ = 13
+% \stopcatcodetable
-\startcatcodetable \xmlcatcodesr
- \catcode`\^^I = 10 % ascii tab is a blank space
- \catcode`\^^M = 5 % ascii return is end-line
- \catcode`\^^L = 5 % ascii form-feed
- \catcode`\ = 10 % ascii space is blank space
- \catcode`\^^Z = 9 % ascii eof is ignored
- \catcode`\& = 13 % entity
- \catcode`\< = 13 % element
- \catcode`\> = 12
- \catcode`\# = 13
- \catcode`\$ = 13
- \catcode`\% = 13
- \catcode`\\ = 13
- \catcode`\^ = 13
- \catcode`\_ = 13
- \catcode`\{ = 13
- \catcode`\} = 13
- \catcode`\| = 13
- \catcode`\~ = 13
-\stopcatcodetable
+% \startcatcodetable \xmlcatcodesr
+% \catcode`\^^I = 10 % ascii tab is a blank space
+% \catcode`\^^M = 5 % ascii return is end-line
+% \catcode`\^^L = 5 % ascii form-feed
+% \catcode`\ = 10 % ascii space is blank space
+% \catcode`\^^Z = 9 % ascii eof is ignored
+% \catcode`\& = 13 % entity
+% \catcode`\< = 13 % element
+% \catcode`\> = 12
+% \catcode`\# = 13
+% \catcode`\$ = 13
+% \catcode`\% = 13
+% \catcode`\\ = 13
+% \catcode`\^ = 13
+% \catcode`\_ = 13
+% \catcode`\{ = 13
+% \catcode`\} = 13
+% \catcode`\| = 13
+% \catcode`\~ = 13
+% \stopcatcodetable
\startcatcodetable \typcatcodesa
\catcode`\^^I = 12
@@ -163,46 +164,67 @@
\catcode`\> = 2
\stopcatcodetable
+\startcatcodetable \txtcatcodes
+ \catcode`\^^I = 10
+ \catcode`\^^M = 5
+ \catcode`\^^L = 5
+ \catcode`\ = 10
+ \catcode`\^^Z = 9
+ \catcode`\\ = 0
+ \catcode`\{ = 1
+ \catcode`\} = 2
+ \catcode`\% = 14
+\stopcatcodetable
+
\letcatcodecommand \ctxcatcodes `\| \relax
\letcatcodecommand \ctxcatcodes `\~ \relax
%letcatcodecommand \prtcatcodes `\| \relax % falls back on ctx
%letcatcodecommand \prtcatcodes `\~ \relax % falls back on ctx
-\letcatcodecommand \xmlcatcodesn `\& \relax
-\letcatcodecommand \xmlcatcodesn `\< \relax
-
-\letcatcodecommand \xmlcatcodese `\& \relax
-\letcatcodecommand \xmlcatcodese `\< \relax
-
-\letcatcodecommand \xmlcatcodesr `\& \relax
-\letcatcodecommand \xmlcatcodesr `\< \relax
-
-\letcatcodecommand \xmlcatcodese `\# \relax
-\letcatcodecommand \xmlcatcodese `\$ \relax
-\letcatcodecommand \xmlcatcodese `\% \relax
-\letcatcodecommand \xmlcatcodese `\\ \relax
-\letcatcodecommand \xmlcatcodese `\^ \relax
-\letcatcodecommand \xmlcatcodese `\_ \relax
-\letcatcodecommand \xmlcatcodese `\{ \relax
-\letcatcodecommand \xmlcatcodese `\} \relax
-\letcatcodecommand \xmlcatcodese `\| \relax
-\letcatcodecommand \xmlcatcodese `\~ \relax
-
-\letcatcodecommand \xmlcatcodesr `\# \relax
-\letcatcodecommand \xmlcatcodesr `\$ \relax
-\letcatcodecommand \xmlcatcodesr `\% \relax
-\letcatcodecommand \xmlcatcodesr `\\ \relax
-\letcatcodecommand \xmlcatcodesr `\^ \relax
-\letcatcodecommand \xmlcatcodesr `\_ \relax
-\letcatcodecommand \xmlcatcodesr `\{ \relax
-\letcatcodecommand \xmlcatcodesr `\} \relax
-\letcatcodecommand \xmlcatcodesr `\| \relax
-\letcatcodecommand \xmlcatcodesr `\~ \relax
-
- \catcodetable \ctxcatcodes
-\let\defaultcatcodetable\ctxcatcodes
-\let\xmlcatcodes \xmlcatcodesn % beware, in mkiv we use \notcatcodes
+% \letcatcodecommand \xmlcatcodesn `\& \relax
+% \letcatcodecommand \xmlcatcodesn `\< \relax
+
+% \letcatcodecommand \xmlcatcodese `\& \relax
+% \letcatcodecommand \xmlcatcodese `\< \relax
+
+% \letcatcodecommand \xmlcatcodesr `\& \relax
+% \letcatcodecommand \xmlcatcodesr `\< \relax
+
+% \letcatcodecommand \xmlcatcodese `\# \relax
+% \letcatcodecommand \xmlcatcodese `\$ \relax
+% \letcatcodecommand \xmlcatcodese `\% \relax
+% \letcatcodecommand \xmlcatcodese `\\ \relax
+% \letcatcodecommand \xmlcatcodese `\^ \relax
+% \letcatcodecommand \xmlcatcodese `\_ \relax
+% \letcatcodecommand \xmlcatcodese `\{ \relax
+% \letcatcodecommand \xmlcatcodese `\} \relax
+% \letcatcodecommand \xmlcatcodese `\| \relax
+% \letcatcodecommand \xmlcatcodese `\~ \relax
+
+% \letcatcodecommand \xmlcatcodesr `\# \relax
+% \letcatcodecommand \xmlcatcodesr `\$ \relax
+% \letcatcodecommand \xmlcatcodesr `\% \relax
+% \letcatcodecommand \xmlcatcodesr `\\ \relax
+% \letcatcodecommand \xmlcatcodesr `\^ \relax
+% \letcatcodecommand \xmlcatcodesr `\_ \relax
+% \letcatcodecommand \xmlcatcodesr `\{ \relax
+% \letcatcodecommand \xmlcatcodesr `\} \relax
+% \letcatcodecommand \xmlcatcodesr `\| \relax
+% \letcatcodecommand \xmlcatcodesr `\~ \relax
+
+ \catcodetable \ctxcatcodes
+\let \defaultcatcodetable \ctxcatcodes
+%\let\xmlcatcodes \xmlcatcodesn % beware, in mkiv we use \notcatcodes
+
+% for the moment here:
+
+\def\starttexcode
+ {\pushcatcodetable
+ \catcodetable\prtcatcodes}
+
+\def\stoptexcode
+ {\popcatcodetable}
\endinput
diff --git a/tex/context/base/catc-xml.tex b/tex/context/base/catc-xml.tex
new file mode 100644
index 000000000..20a241f26
--- /dev/null
+++ b/tex/context/base/catc-xml.tex
@@ -0,0 +1,137 @@
+%D \module
+%D [ file=catc-xml,
+%D version=2006.09.18,
+%D title=\CONTEXT\ Catcode Macros,
+%D subtitle=\XML\ Catcode Tables,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Catcode Regimes / XML}
+
+\ifdefined \xmlcatcodesn \else \newcatcodetable \xmlcatcodesn \fi % normal
+\ifdefined \xmlcatcodese \else \newcatcodetable \xmlcatcodese \fi % entitle
+\ifdefined \xmlcatcodesr \else \newcatcodetable \xmlcatcodesr \fi % reduce
+
+\startcatcodetable \xmlcatcodesn
+ \catcode`\^^I = 10 % ascii tab is a blank space
+ \catcode`\^^M = 5 % ascii return is end-line
+ \catcode`\^^L = 5 % ascii form-feed
+ \catcode`\ = 10 % ascii space is blank space
+ \catcode`\^^Z = 9 % ascii eof is ignored
+ \catcode`\& = 13 % entity
+ \catcode`\< = 13 % element
+ \catcode`\> = 12
+ \catcode`\" = 12 % probably not needed any more
+ \catcode`\/ = 12 % probably not needed any more
+ \catcode`\' = 12 % probably not needed any more
+ \catcode`\~ = 12 % probably not needed any more
+ \catcode`\# = 12 % probably not needed any more
+ \catcode`\\ = 12 % probably not needed any more
+\stopcatcodetable
+
+\startcatcodetable \xmlcatcodese
+ \catcode`\^^I = 10 % ascii tab is a blank space
+ \catcode`\^^M = 5 % ascii return is end-line
+ \catcode`\^^L = 5 % ascii form-feed
+ \catcode`\ = 10 % ascii space is blank space
+ \catcode`\^^Z = 9 % ascii eof is ignored
+ \catcode`\& = 13 % entity
+ \catcode`\< = 13 % element
+ \catcode`\> = 12
+ \catcode`\# = 13
+ \catcode`\$ = 13
+ \catcode`\% = 13
+ \catcode`\\ = 13
+ \catcode`\^ = 13
+ \catcode`\_ = 13
+ \catcode`\{ = 13
+ \catcode`\} = 13
+ \catcode`\| = 13
+ \catcode`\~ = 13
+\stopcatcodetable
+
+\startcatcodetable \xmlcatcodesr
+ \catcode`\^^I = 10 % ascii tab is a blank space
+ \catcode`\^^M = 5 % ascii return is end-line
+ \catcode`\^^L = 5 % ascii form-feed
+ \catcode`\ = 10 % ascii space is blank space
+ \catcode`\^^Z = 9 % ascii eof is ignored
+ \catcode`\& = 13 % entity
+ \catcode`\< = 13 % element
+ \catcode`\> = 12
+ \catcode`\# = 13
+ \catcode`\$ = 13
+ \catcode`\% = 13
+ \catcode`\\ = 13
+ \catcode`\^ = 13
+ \catcode`\_ = 13
+ \catcode`\{ = 13
+ \catcode`\} = 13
+ \catcode`\| = 13
+ \catcode`\~ = 13
+\stopcatcodetable
+
+%D Next we hook in some active character definitions.
+
+\letcatcodecommand \xmlcatcodesn `\& \relax
+\letcatcodecommand \xmlcatcodesn `\< \relax
+
+\letcatcodecommand \xmlcatcodese `\& \relax
+\letcatcodecommand \xmlcatcodese `\< \relax
+
+\letcatcodecommand \xmlcatcodesr `\& \relax
+\letcatcodecommand \xmlcatcodesr `\< \relax
+
+\letcatcodecommand \xmlcatcodese `\# \relax
+\letcatcodecommand \xmlcatcodese `\$ \relax
+\letcatcodecommand \xmlcatcodese `\% \relax
+\letcatcodecommand \xmlcatcodese `\\ \relax
+\letcatcodecommand \xmlcatcodese `\^ \relax
+\letcatcodecommand \xmlcatcodese `\_ \relax
+\letcatcodecommand \xmlcatcodese `\{ \relax
+\letcatcodecommand \xmlcatcodese `\} \relax
+\letcatcodecommand \xmlcatcodese `\| \relax
+\letcatcodecommand \xmlcatcodese `\~ \relax
+
+\letcatcodecommand \xmlcatcodesr `\# \relax
+\letcatcodecommand \xmlcatcodesr `\$ \relax
+\letcatcodecommand \xmlcatcodesr `\% \relax
+\letcatcodecommand \xmlcatcodesr `\\ \relax
+\letcatcodecommand \xmlcatcodesr `\^ \relax
+\letcatcodecommand \xmlcatcodesr `\_ \relax
+\letcatcodecommand \xmlcatcodesr `\{ \relax
+\letcatcodecommand \xmlcatcodesr `\} \relax
+\letcatcodecommand \xmlcatcodesr `\| \relax
+\letcatcodecommand \xmlcatcodesr `\~ \relax
+
+\let\xmlcatcodes \xmlcatcodesn % beware, in mkiv we use \notcatcodes
+
+%D We register the catcodetables at the \LUA\ end where some further
+%D initializations take place.
+
+\ifnum\texengine=\luatexengine
+
+ \ctxlua {
+ characters.define(
+ { % letter catcodes
+ \number\xmlcatcodesn,
+ \number\xmlcatcodese,
+ \number\xmlcatcodesr,
+ },
+ { % activate catcodes
+ \number\xmlcatcodesn,
+ \number\xmlcatcodese,
+ \number\xmlcatcodesr,
+ }
+ )
+ catcodes.register("xmlcatcodes",\number\xmlcatcodes)
+ }
+
+\fi
+
+\endinput
diff --git a/tex/context/base/char-cmp.lua b/tex/context/base/char-cmp.lua
index 848eeebee..1f414d502 100644
--- a/tex/context/base/char-cmp.lua
+++ b/tex/context/base/char-cmp.lua
@@ -11,17 +11,19 @@ local utf = unicode.utf8
local utfchar = utf.char
local unpack = unpack or table.unpack
+local allocate = utilities.storage.allocate
+
characters = characters or { }
local characters = characters
-characters.uncomposed = characters.uncomposed or { }
+characters.uncomposed = allocate()
local uncomposed = characters.uncomposed
--[[ldx--
<p>The code defined here may move to the big character table.</p>
--ldx]]--
-characters.basedigits = {
+characters.basedigits = allocate {
['zero'] = 48, ['one'] = 49,
['two'] = 50, ['three'] = 51,
['four'] = 52, ['five'] = 53,
@@ -50,7 +52,7 @@ Of course they may come in handy elsewhere too</p>
-- => shcode == { ub('a') }
-- => reduction = "a"
-uncomposed.left = {
+uncomposed.left = allocate {
AEligature = "A", aeligature = "a",
OEligature = "O", oeligature = "o",
IJligature = "I", ijligature = "i",
@@ -60,7 +62,7 @@ uncomposed.left = {
Ssharp = "S", ssharp = "s",
}
-uncomposed.right = {
+uncomposed.right = allocate {
AEligature = "E", aeligature = "e",
OEligature = "E", oeligature = "e",
IJligature = "J", ijligature = "j",
@@ -70,7 +72,7 @@ uncomposed.right = {
Ssharp = "S", ssharp = "s",
}
-uncomposed.both = {
+uncomposed.both = allocate {
Acircumflex = "A", acircumflex = "a",
Ccircumflex = "C", ccircumflex = "c",
Ecircumflex = "E", ecircumflex = "e",
@@ -222,7 +224,7 @@ is that a character can be in an encoding twice but is hashed
once.</p>
--ldx]]--
-characters.ligatures = {
+characters.ligatures = allocate {
['f'] = {
{ 'f', 'ff' },
{ 'i', 'fi' },
@@ -245,7 +247,7 @@ characters.ligatures = {
},
}
-characters.texligatures = {
+characters.texligatures = allocate {
-- ['space'] = {
-- { 'L', 'Lslash' },
-- { 'l', 'lslash' }
diff --git a/tex/context/base/char-enc.lua b/tex/context/base/char-enc.lua
index 8addff0e8..bdca9582c 100644
--- a/tex/context/base/char-enc.lua
+++ b/tex/context/base/char-enc.lua
@@ -8,10 +8,12 @@ if not modules then modules = { } end modules ['char-syn'] = {
-- thanks to tex4ht for these mappings
+local allocate = utilities.storage.allocate
+
characters = characters or { }
local characters = characters
-characters.synonyms = {
+characters.synonyms = allocate {
angle = 0x2220,
anticlockwise = 0x21BA,
arrowaxisleft = 0x2190,
diff --git a/tex/context/base/char-ini.lua b/tex/context/base/char-ini.lua
index a24de6e23..4893875c3 100644
--- a/tex/context/base/char-ini.lua
+++ b/tex/context/base/char-ini.lua
@@ -14,10 +14,17 @@ local concat = table.concat
local next, tonumber = next, tonumber
local texsprint, texprint = tex.sprint, tex.print
local format, lower, gsub, match, gmatch = string.format, string.lower, string.gsub, string.match, string.match, string.gmatch
+local texsetlccode, texsetuccode, texsetsfcode, texsetcatcode = tex.setlccode, tex.setuccode, tex.setsfcode, tex.setcatcode
+
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local ctxcatcodes = tex.ctxcatcodes
local texcatcodes = tex.texcatcodes
+local trace_defining = false trackers.register("characters.defining", function(v) characters_defining = v end)
+
+local report_defining = logs.new("characters")
+
--[[ldx--
<p>This module implements some methods and creates additional datastructured
from the big character table that we use for all kind of purposes:
@@ -27,15 +34,23 @@ from the big character table that we use for all kind of purposes:
loaded!</p>
--ldx]]--
-characters = characters or { }
+characters = characters or { }
local characters = characters
-characters.data = characters.data or { }
+
local data = characters.data
+if data then
+ mark(data) -- why does this fail
+else
+ report_defining("fatal error: 'char-def.lua' is not loaded")
+ os.exit()
+end
+
if not characters.ranges then
- characters.ranges = { }
+ local ranges = allocate { }
+ characters.ranges = ranges
for k, v in next, data do
- characters.ranges[#characters.ranges+1] = k
+ ranges[#ranges+1] = k
end
end
@@ -43,20 +58,18 @@ storage.register("characters/ranges",characters.ranges,"characters.ranges")
local ranges = characters.ranges
-setmetatable(data, {
- __index = function(t,k)
- for r=1,#ranges do
- local rr = ranges[r] -- first in range
- if k > rr and k <= data[rr].range then
- t[k] = t[rr]
- return t[k]
- end
+setmetatablekey(data, "__index", function(t,k)
+ for r=1,#ranges do
+ local rr = ranges[r] -- first in range
+ if k > rr and k <= data[rr].range then
+ t[k] = t[rr]
+ return t[k]
end
- return nil
end
-})
+ return nil
+end )
-characters.blocks = {
+characters.blocks = allocate {
["aegeannumbers"] = { 0x10100, 0x1013F, "Aegean Numbers" },
["alphabeticpresentationforms"] = { 0x0FB00, 0x0FB4F, "Alphabetic Presentation Forms" },
["ancientgreekmusicalnotation"] = { 0x1D200, 0x1D24F, "Ancient Greek Musical Notation" },
@@ -249,7 +262,7 @@ function characters.getrange(name)
return slot, slot, nil
end
-characters.categories = {
+characters.categories = allocate {
lu = "Letter Uppercase",
ll = "Letter Lowercase",
lt = "Letter Titlecase",
@@ -285,22 +298,26 @@ characters.categories = {
--~ special : cf (softhyphen) zs (emspace)
--~ characters: ll lm lo lt lu mn nl no pc pd pe pf pi po ps sc sk sm so
-characters.is_character = table.tohash {
+local is_character = allocate ( table.tohash {
"lu","ll","lt","lm","lo",
"nd","nl","no",
"mn",
"nl","no",
"pc","pd","ps","pe","pi","pf","po",
"sm","sc","sk","so"
-}
+} )
-characters.is_letter = table.tohash {
+local is_letter = allocate ( table.tohash {
"ll","lm","lo","lt","lu"
-}
+} )
-characters.is_command = table.tohash {
+local is_command = allocate ( table.tohash {
"cf","zs"
-}
+} )
+
+characters.is_character = is_character
+characters.is_letter = is_letter
+characters.is_command = is_command
-- linebreak: todo: hash
--
@@ -311,7 +328,7 @@ characters.is_command = table.tohash {
--
-- N A H W F Na
-characters.bidi = {
+characters.bidi = allocate {
l = "Left-to-Right",
lre = "Left-to-Right Embedding",
lro = "Left-to-Right Override",
@@ -360,8 +377,8 @@ if not characters.fallbacks then
end
-storage.register("characters.fallbacks", characters.fallbacks, "characters.fallbacks")
-storage.register("characters.directions", characters.directions, "characters.directions")
+storage.register("characters/fallbacks", characters.fallbacks, "characters.fallbacks")
+storage.register("characters/directions", characters.directions, "characters.directions")
--[[ldx--
<p>The <type>context</type> namespace is used to store methods and data
@@ -381,74 +398,155 @@ function tex.uprint(n)
texsprint(ctxcatcodes,utfchar(n))
end
-local template_a = "\\startextendcatcodetable{%s}\\chardef\\l=11\\chardef\\a=13\\let\\c\\catcode%s\\let\\a\\undefined\\let\\l\\undefined\\let\\c\\undefined\\stopextendcatcodetable"
-local template_b = "\\chardef\\l=11\\chardef\\a=13\\let\\c\\catcode%s\\let\\a\\undefined\\let\\l\\undefined\\let\\c\\undefined"
-
--- we need a function for setting the codes ....
-
-function characters.define(tobelettered, tobeactivated) -- catcodetables
- local is_character, is_command, is_letter = characters.is_character, characters.is_command, characters.is_letter
- local lettered, activated = { }, { }
- for u, chr in next, data do
- -- we can use a macro instead of direct settings
- local fallback = chr.fallback
- if fallback then
- -- texprint(format("{\\catcode %s=13\\unexpanded\\gdef %s{\\checkedchar{%s}{%s}}}",u,utfchar(u),u,fallback))
- texsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\checkedchar{",u,"}{",fallback,"}}}") -- no texprint
- activated[#activated+1] = "\\c"..u.."\\a"
- else
- local contextname = chr.contextname
- local category = chr.category
- if contextname then
- if is_character[category] then
- -- by this time, we're still in normal catcode mode
- -- subtle: not "\\",contextname but "\\"..contextname
- if chr.unicodeslot < 128 then
- -- texprint(ctxcatcodes, "\\chardef\\"..contextname,"=",u)
- texprint(ctxcatcodes,format("\\chardef\\%s=%s",contextname,u))
- else
- -- texprint(ctxcatcodes, "\\let\\"..contextname,"=",utfchar(u))
- texprint(ctxcatcodes,format("\\let\\%s=%s",contextname,utfchar(u)))
- if is_letter[category] then
- lettered[#lettered+1] = "\\c"..u.."\\l"
+if texsetcatcode then
+
+ -- todo -- define per table and then also register name (for tracing)
+
+ function characters.define(tobelettered, tobeactivated) -- catcodetables
+
+ if trace_defining then
+ report_defining("defining active character commands")
+ end
+
+ local activated = { }
+
+ for u, chr in next, data do -- these will be commands
+ local fallback = chr.fallback
+ if fallback then
+ texsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\checkedchar{",u,"}{",fallback,"}}}") -- no texprint
+ activated[#activated+1] = u
+ else
+ local contextname = chr.contextname
+ if contextname then
+ local category = chr.category
+ if is_character[category] then
+ if chr.unicodeslot < 128 then
+ texprint(ctxcatcodes,format("\\chardef\\%s=%s",contextname,u))
+ else
+ texprint(ctxcatcodes,format("\\let\\%s=%s",contextname,utfchar(u)))
end
+ elseif is_command[category] then
+ texsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}") -- no texprint
+ activated[#activated+1] = u
end
- elseif is_command[category] then
- -- this might change: contextcommand ipv contextname
- -- texprint(format("{\\catcode %s=13\\unexpanded\\gdef %s{\\%s}}",u,utfchar(u),contextname))
- texsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}") -- no texprint
- activated[#activated+1] = "\\c"..u.."\\a"
- end
- elseif is_letter[category] then
- if u >= 128 and u <= 65536 then -- catch private mess
- lettered[#lettered+1] = "\\c"..u.."\\l"
end
end
end
- if chr.range then
- lettered[#lettered+1] = format('\\dofastrecurse{"%05X}{"%05X}{1}{\\c\\fastrecursecounter\\l}',u,chr.range)
+
+ if tobelettered then -- shared
+ -- local saved = tex.catcodetable
+ -- for i=1,#tobelettered do
+ -- tex.catcodetable = tobelettered[i]
+ if trace_defining then
+ report_defining("defining letters (global, shared)")
+ end
+ for u, chr in next, data do
+ if not chr.fallback and is_letter[chr.category] and u >= 128 and u <= 65536 then
+ texsetcatcode(u,11)
+ end
+ if chr.range then
+ for i=1,u,chr.range do
+ texsetcatcode(i,11)
+ end
+ end
+ end
+ texsetcatcode(0x200C,11) -- non-joiner
+ texsetcatcode(0x200D,11) -- joiner
+ -- end
+ -- tex.catcodetable = saved
end
- end
- -- if false then
- lettered[#lettered+1] = "\\c"..0x200C.."\\l" -- non-joiner
- lettered[#lettered+1] = "\\c"..0x200D.."\\l" -- joiner
- -- fi
- if tobelettered then
- lettered = concat(lettered)
- if true then
- texsprint(ctxcatcodes,format(template_b,lettered))
- else
- for l=1,#tobelettered do
- texsprint(ctxcatcodes,format(template_a,tobelettered[l],lettered))
+
+ local nofactivated = #tobeactivated
+ if tobeactivated and nofactivated > 0 then
+ for i=1,nofactivated do
+ local u = activated[i]
+ report_defining("character 0x%05X is active in sets %s (%s)",u,concat(tobeactivated,","),data[u].description)
+ end
+ local saved = tex.catcodetable
+ for i=1,#tobeactivated do
+ local vector = tobeactivated[i]
+ if trace_defining then
+ report_defining("defining %s active characters in vector %s",nofactivated,vector)
+ end
+ tex.catcodetable = vector
+ for i=1,nofactivated do
+ texsetcatcode(activated[i],13)
+ end
end
+ tex.catcodetable = saved
end
+
end
- if tobeactivated then
- activated = concat(activated)
- for a=1,#tobeactivated do
- texsprint(ctxcatcodes,format(template_a,tobeactivated[a],activated))
+
+else -- keep this
+
+ local template_a = "\\startextendcatcodetable{%s}\\chardef\\l=11\\chardef\\a=13\\let\\c\\catcode%s\\let\\a\\undefined\\let\\l\\undefined\\let\\c\\undefined\\stopextendcatcodetable"
+ local template_b = "\\chardef\\l=11\\chardef\\a=13\\let\\c\\catcode%s\\let\\a\\undefined\\let\\l\\undefined\\let\\c\\undefined"
+
+ function characters.define(tobelettered, tobeactivated) -- catcodetables
+ local lettered, activated = { }, { }
+ for u, chr in next, data do
+ -- we can use a macro instead of direct settings
+ local fallback = chr.fallback
+ if fallback then
+ -- texprint(format("{\\catcode %s=13\\unexpanded\\gdef %s{\\checkedchar{%s}{%s}}}",u,utfchar(u),u,fallback))
+ texsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\checkedchar{",u,"}{",fallback,"}}}") -- no texprint
+ activated[#activated+1] = "\\c"..u.."\\a"
+ else
+ local contextname = chr.contextname
+ local category = chr.category
+ if contextname then
+ if is_character[category] then
+ -- by this time, we're still in normal catcode mode
+ -- subtle: not "\\",contextname but "\\"..contextname
+ if chr.unicodeslot < 128 then
+ -- texprint(ctxcatcodes, "\\chardef\\"..contextname,"=",u)
+ texprint(ctxcatcodes,format("\\chardef\\%s=%s",contextname,u))
+ else
+ -- texprint(ctxcatcodes, "\\let\\"..contextname,"=",utfchar(u))
+ texprint(ctxcatcodes,format("\\let\\%s=%s",contextname,utfchar(u)))
+ if is_letter[category] then
+ lettered[#lettered+1] = "\\c"..u.."\\l"
+ end
+ end
+ elseif is_command[category] then
+ -- this might change: contextcommand ipv contextname
+ -- texprint(format("{\\catcode %s=13\\unexpanded\\gdef %s{\\%s}}",u,utfchar(u),contextname))
+ texsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}") -- no texprint
+ activated[#activated+1] = "\\c"..u.."\\a"
+ end
+ elseif is_letter[category] then
+ if u >= 128 and u <= 65536 then -- catch private mess
+ lettered[#lettered+1] = "\\c"..u.."\\l"
+ end
+ end
+ end
+ if chr.range then
+ lettered[#lettered+1] = format('\\dofastrecurse{"%05X}{"%05X}{1}{\\c\\fastrecursecounter\\l}',u,chr.range)
+ end
+ end
+ -- if false then
+ lettered[#lettered+1] = "\\c"..0x200C.."\\l" -- non-joiner
+ lettered[#lettered+1] = "\\c"..0x200D.."\\l" -- joiner
+ -- fi
+ if tobelettered then
+ lettered = concat(lettered)
+ if true then
+ texsprint(ctxcatcodes,format(template_b,lettered)) -- global
+ else
+ for l=1,#tobelettered do
+ texsprint(ctxcatcodes,format(template_a,tobelettered[l],lettered))
+ end
+ end
+ end
+ if tobeactivated then
+ activated = concat(activated)
+ for a=1,#tobeactivated do
+ texsprint(ctxcatcodes,format(template_a,tobeactivated[a],activated))
+ end
end
end
+
end
function characters.charcode(box)
@@ -461,24 +559,64 @@ end
<p>Setting the lccodes is also done in a loop over the data table.</p>
--ldx]]--
+--~ function tex.setsfcode (index,sf) ... end
+--~ function tex.setlccode (index,lc,[uc]) ... end -- optional third value, safes call
+--~ function tex.setuccode (index,uc,[lc]) ... end
+--~ function tex.setcatcode(index,cc) ... end
+
-- we need a function ...
-function characters.setcodes()
- for code, chr in next, data do
- local cc = chr.category
- if cc == 'll' or cc == 'lu' or cc == 'lt' then
- local lc, uc = chr.lccode, chr.uccode
- if not lc then chr.lccode, lc = code, code end
- if not uc then chr.uccode, uc = code, code end
- texsprint(ctxcatcodes,format("\\setcclcuc{%i}{%i}{%i}",code,lc,uc))
+--~ tex.lccode
+--~ tex.uccode
+--~ tex.sfcode
+--~ tex.catcode
+
+if texsetcatcode then
+
+ function characters.setcodes()
+ if trace_defining then
+ report_defining("defining lc and uc codes")
end
- if cc == "lu" then
- texprint(ctxcatcodes,"\\sfcode ",code,"999 ")
+ for code, chr in next, data do
+ local cc = chr.category
+ if cc == 'll' or cc == 'lu' or cc == 'lt' then
+ local lc, uc = chr.lccode, chr.uccode
+ if not lc then chr.lccode, lc = code, code end
+ if not uc then chr.uccode, uc = code, code end
+ texsetcatcode(code,11) -- letter
+ texsetlccode(code,lc,uc)
+ if cc == "lu" then
+ texsetsfcode(code,999)
+ end
+ elseif cc == "lo" and chr.range then
+ for i=code,chr.range do
+ texsetcatcode(code,11) -- letter
+ texsetlccode(code,code,code) -- self self
+ end
+ end
end
- if cc == "lo" and chr.range then
- texsprint(ctxcatcodes,format('\\dofastrecurse{"%05X}{"%05X}{1}{\\setcclcucself\\fastrecursecounter}',code,chr.range))
+ end
+
+else -- keep this one
+
+ function characters.setcodes()
+ for code, chr in next, data do
+ local cc = chr.category
+ if cc == 'll' or cc == 'lu' or cc == 'lt' then
+ local lc, uc = chr.lccode, chr.uccode
+ if not lc then chr.lccode, lc = code, code end
+ if not uc then chr.uccode, uc = code, code end
+ texsprint(ctxcatcodes,format("\\setcclcuc{%i}{%i}{%i}",code,lc,uc))
+ end
+ if cc == "lu" then
+ texprint(ctxcatcodes,"\\sfcode ",code,"999 ")
+ end
+ if cc == "lo" and chr.range then
+ texsprint(ctxcatcodes,format('\\dofastrecurse{"%05X}{"%05X}{1}{\\setcclcucself\\fastrecursecounter}',code,chr.range))
+ end
end
end
+
end
--[[ldx--
@@ -649,6 +787,18 @@ function characters.upper(str)
return concat(new)
end
+function characters.lettered(str)
+ local new = { }
+ for u in utfvalues(str) do
+ local d = data[u]
+ if is_letter[d.category] then
+ new[#new+1] = utfchar(d.lccode or u)
+ end
+ end
+ return concat(new)
+end
+
+
-- -- some day we might go this route, but it does not really save that much
-- -- so not now (we can generate a lot using mtx-unicode that operates on the
-- -- database)
diff --git a/tex/context/base/char-ini.mkiv b/tex/context/base/char-ini.mkiv
index 0d5e16bb0..a89c448be 100644
--- a/tex/context/base/char-ini.mkiv
+++ b/tex/context/base/char-ini.mkiv
@@ -56,21 +56,22 @@
\number\mthcatcodes,
\number\vrbcatcodes,
\number\prtcatcodes,
- \number\xmlcatcodesn,
- \number\xmlcatcodese,
- \number\xmlcatcodesr,
+% \number\xmlcatcodesn,
+% \number\xmlcatcodese,
+% \number\xmlcatcodesr,
\number\typcatcodesa,
\number\typcatcodesb,
+ \number\txtcatcodes,
},
{ % activate catcodes
\number\ctxcatcodes,
\number\notcatcodes,
- \number\xmlcatcodesn,
- \number\xmlcatcodese,
- \number\xmlcatcodesr,
+% \number\xmlcatcodesn,
+% \number\xmlcatcodese,
+% \number\xmlcatcodesr,
}
)
- catcodes.register("xmlcatcodes",\number\xmlcatcodes)
+% catcodes.register("xmlcatcodes",\number\xmlcatcodes)
}
\protect \endinput
diff --git a/tex/context/base/char-tex.lua b/tex/context/base/char-tex.lua
index 5a1edb42e..6e57a860a 100644
--- a/tex/context/base/char-tex.lua
+++ b/tex/context/base/char-tex.lua
@@ -12,11 +12,13 @@ local lpeg = lpeg
local P, C, R, S, Cs, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc
local U, lpegmatch = lpeg.patterns.utf8, lpeg.match
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+
characters = characters or { }
local characters = characters
characters.tex = characters.tex or { }
-local accent_map = {
+local accent_map = allocate {
['~'] = "̃" , -- ̃ Ẽ
['"'] = "̈" , -- ̈ Ë
["`"] = "̀" , -- ̀ È
@@ -49,7 +51,7 @@ local function remap_accents(a,c,braced)
end
end
-local command_map = {
+local command_map = allocate {
["i"] = "ı"
}
diff --git a/tex/context/base/char-utf.lua b/tex/context/base/char-utf.lua
index 680d426b0..a0a611e9a 100644
--- a/tex/context/base/char-utf.lua
+++ b/tex/context/base/char-utf.lua
@@ -25,23 +25,24 @@ local concat, gmatch, gsub = table.concat, string.gmatch, string.gsub
local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues
local ctxcatcodes = tex.ctxcatcodes
local texsprint = tex.sprint
+local allocate = utilities.storage.allocate
-- todo: trackers
-characters = characters or { }
-local characters = characters
+characters = characters or { }
+local characters = characters
-characters.graphemes = characters.graphemes or { }
-local graphemes = characters.graphemes
+characters.graphemes = allocate()
+local graphemes = characters.graphemes
-characters.filters = characters.filters or { }
-local filters = characters.filters
+characters.filters = allocate()
+local filters = characters.filters
-filters.utf = filters.utf or { }
-local utffilters = characters.filters.utf
+filters.utf = filters.utf or { }
+local utffilters = characters.filters.utf
-utffilters.collapsing = true
-utffilters.expanding = true
+utffilters.collapsing = true
+utffilters.expanding = true
--[[ldx--
<p>It only makes sense to collapse at runtime, since we don't expect
@@ -123,9 +124,9 @@ to their right glyph there.</p>
0x100000.</p>
--ldx]]--
-local low = { }
-local high = { }
-local escapes = { }
+local low = allocate({ })
+local high = allocate({ })
+local escapes = allocate({ })
local special = "~#$%^&_{}\\|"
local private = {
diff --git a/tex/context/base/char-utf.mkiv b/tex/context/base/char-utf.mkiv
index 16b4029d8..b96aec38d 100644
--- a/tex/context/base/char-utf.mkiv
+++ b/tex/context/base/char-utf.mkiv
@@ -31,7 +31,7 @@
\appendtoks
\ctxlua {
characters.filters.utf.collapsing = true
- resolvers.install_text_filter('utf',characters.filters.utf.collapse)
+ resolvers.filters.install('utf',characters.filters.utf.collapse)
}%
\to \everyjob
diff --git a/tex/context/base/colo-icc.lua b/tex/context/base/colo-icc.lua
index 5df4b9663..fd30b63e4 100644
--- a/tex/context/base/colo-icc.lua
+++ b/tex/context/base/colo-icc.lua
@@ -17,7 +17,7 @@ local invalid = R(char(0)..char(31))
local cleaned = invalid^0 * Cs((1-invalid)^0)
function colors.iccprofile(filename,verbose)
- local fullname = resolvers.find_file(filename,"icc") or ""
+ local fullname = resolvers.findfile(filename,"icc") or ""
if fullname == "" then
local locate = resolvers.finders.loc -- not in mtxrun
if locate then
diff --git a/tex/context/base/cont-new.tex b/tex/context/base/cont-new.tex
index 8c0852955..0fdce6595 100644
--- a/tex/context/base/cont-new.tex
+++ b/tex/context/base/cont-new.tex
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2010.08.20 00:00}
+\newcontextversion{2010.09.03 11:05}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii
index 4be60cbfc..36b2cd9e9 100644
--- a/tex/context/base/context.mkii
+++ b/tex/context/base/context.mkii
@@ -29,6 +29,7 @@
\loadcorefile{catc-def}
\loadcorefile{catc-ctx}
\loadcorefile{catc-sym}
+\loadcorefile{catc-xml}
\loadmarkfile{syst-gen}
\loadmarkfile{syst-ext}
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index c8271fcb1..371d363b4 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -169,15 +169,15 @@
\loadmarkfile{strc-ini}
\loadmarkfile{strc-tag}
\loadmarkfile{strc-doc}
+\loadmarkfile{strc-num}
\loadmarkfile{strc-mar}
\loadmarkfile{strc-prc}
\loadmarkfile{strc-sbe}
\loadmarkfile{strc-lst}
\loadmarkfile{strc-sec}
-\loadmarkfile{strc-num}
+\loadmarkfile{strc-pag} % hm, depends on core-num
\loadmarkfile{strc-ren}
\loadmarkfile{strc-xml}
-\loadmarkfile{strc-pag} % hm, depends on core-num
\loadmarkfile{strc-def} % might happen later
\loadmarkfile{strc-ref}
\loadmarkfile{strc-reg}
@@ -339,7 +339,7 @@
\loadmarkfile{bibl-bib}
\loadmarkfile{bibl-tra}
-\loadmarkfile{x-xtag} % at some point this will not be preloaded
+%loadmarkfile{x-xtag} % no longer preloaded
\loadcorefile{meta-xml}
diff --git a/tex/context/base/context.tex b/tex/context/base/context.tex
index 351667d9d..494462f6a 100644
--- a/tex/context/base/context.tex
+++ b/tex/context/base/context.tex
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2010.08.20 00:00}
+\edef\contextversion{2010.09.03 11:05}
%D For those who want to use this:
diff --git a/tex/context/base/core-con.lua b/tex/context/base/core-con.lua
index e83f9a487..7c1bb01f9 100644
--- a/tex/context/base/core-con.lua
+++ b/tex/context/base/core-con.lua
@@ -18,11 +18,12 @@ local utf = unicode.utf8
local floor, date, time, concat = math.floor, os.date, os.time, table.concat
local lower, format, rep = string.lower, string.format, string.rep
-local texsprint, utfchar = tex.sprint, utf.char
+local utfchar, utfbyte = utf.char, utf.byte
local tonumber, tostring = tonumber, tostring
-local settings_to_array = utilities.parsers.settings_to_array
-local ctxcatcodes = tex.ctxcatcodes
+local settings_to_array = utilities.parsers.settings_to_array
+local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
+local allocate = utilities.storage.allocate
converters = converters or { }
local converters = converters
@@ -56,7 +57,9 @@ end
--~ 0x06F5, 0x06F6, 0x06F7, 0x06F8, 0x06F9
--~ },
-languages.counters = {
+-- to be reconsidered ... languages namespace here, might become local plus a register command
+
+languages.counters = allocate {
['**'] = {
0x0061, 0x0062, 0x0063, 0x0064, 0x0065,
0x0066, 0x0067, 0x0068, 0x0069, 0x006A,
@@ -134,13 +137,13 @@ languages.counters = {
local counters = languages.counters
-counters['ar'] = counters['arabic']
-counters['gr'] = counters['greek']
-counters['g'] = counters['greek']
-counters['sl'] = counters['slovenian']
-counters['kr'] = counters['korean']
-counters['kr-p'] = counters['korean-parent']
-counters['kr-c'] = counters['korean-circle']
+counters['ar'] = counters['arabic']
+counters['gr'] = counters['greek']
+counters['g'] = counters['greek']
+counters['sl'] = counters['slovenian']
+counters['kr'] = counters['korean']
+counters['kr-p'] = counters['korean-parent']
+counters['kr-c'] = counters['korean-circle']
local fallback = utf.byte('0')
diff --git a/tex/context/base/core-ini.mkiv b/tex/context/base/core-ini.mkiv
index d6a72bb26..cd2d2e25e 100644
--- a/tex/context/base/core-ini.mkiv
+++ b/tex/context/base/core-ini.mkiv
@@ -56,11 +56,4 @@
%appendtoks \setlastlinewidth \to \everyendofpar % gone, will be done in lua
\appendtoks \endgraf \to \everyendofpar
-% Todo: verbatim, xml, tex, move code to here
-
-\ifx\normalcompound\undefined \let\normalcompound=| \fi
-
-\appendtoks \catcode`|=\@@active \let|\normalcompound \to \everyTEXinputmode
-\appendtoks \catcode`|=\@@letter \to \everyXMLinputmode
-
\protect \endinput
diff --git a/tex/context/base/core-job.lua b/tex/context/base/core-job.lua
index f58344f93..474eb86bf 100644
--- a/tex/context/base/core-job.lua
+++ b/tex/context/base/core-job.lua
@@ -49,7 +49,7 @@ function resolvers.findctxfile(name,maxreadlevel)
end
end
end
- return resolvers.find_file(name) or ""
+ return resolvers.findfile(name) or ""
end
end
@@ -73,12 +73,12 @@ function commands.locatefilepath(name,maxreadlevel)
end
function commands.usepath(paths,maxreadlevel)
- resolvers.register_extra_path(paths)
+ resolvers.registerextrapath(paths)
texsprint(texcatcodes,concat(resolvers.instance.extra_paths or {}, ""))
end
function commands.usesubpath(subpaths,maxreadlevel)
- resolvers.register_extra_path(nil,subpaths)
+ resolvers.registerextrapath(nil,subpaths)
texsprint(texcatcodes,concat(resolvers.instance.extra_paths or {}, ""))
end
@@ -113,13 +113,13 @@ local function convertexamodes(str)
end
end
--- we need a system file option: ,. .. etc + paths but no tex lookup so resolvers.find_file is wrong here
+-- we need a system file option: ,. .. etc + paths but no tex lookup so resolvers.findfile is wrong here
function commands.loadexamodes(filename)
if not filename or filename == "" then
filename = file.removesuffix(tex.jobname)
end
- filename = resolvers.find_file(file.addsuffix(filename,'ctm')) or ""
+ filename = resolvers.findfile(file.addsuffix(filename,'ctm')) or ""
if filename ~= "" then
commands.writestatus("examodes","loading %s",filename) -- todo: message system
convertexamodes(io.loaddata(filename))
diff --git a/tex/context/base/core-job.mkiv b/tex/context/base/core-job.mkiv
index e52aeeaac..85f20db12 100644
--- a/tex/context/base/core-job.mkiv
+++ b/tex/context/base/core-job.mkiv
@@ -65,7 +65,7 @@
\def\registerfileinfo[#1#2]#3% geen \showmessage ?
{\writestatus\m!systems{#1#2 file #3 at line \the\inputlineno}}
-\ifx\preloadfonts \undefined \let\preloadfonts \relax \fi
+\ifdefined\preloadfonts\else \let\preloadfonts\relax \fi
\def\loadallsystemfiles#1#2%
{\ifx\@@svdirectory\empty
@@ -76,8 +76,6 @@
\processcommacommand[\@@svdirectory]\doloadsystemfile
\fi}
-\ifx\disableXML\undefined \let\disableXML\relax \fi
-
\def\loadsystemfiles
{\reportprotectionstate
\readsysfile\f!newfilename{\showmessage\m!systems2\f!newfilename}\donothing
@@ -86,13 +84,11 @@
\donothing
\loadallsystemfiles\f!sysfilename
{\loadallsystemfiles{\f!sysfilename.rme}\donothing % new, fall back
- \doglobal\appendtoks % brrr better \setcatcodetable\ctxcatcodes % % test
- \bgroup\disableXML\loadallsystemfiles\f!errfilename\donothing\egroup
- \to\everygoodbye}}
+ \loadallsystemfiles \f!errfilename \donothing}}
%D We don't want multiple jobfiles to interfere.
-\def\loadoptionfile
+\def\loadoptionfile % todo : mark document.* tables as storage
{\readjobfile{\jobname.\f!optionextension}
{\showmessage\m!systems2{\jobname.\f!optionextension}%
\ctxlua{commands.logoptionfile("\jobname.\f!optionextension")}}%
diff --git a/tex/context/base/core-mis.mkiv b/tex/context/base/core-mis.mkiv
index 6e159532f..88da7fb5e 100644
--- a/tex/context/base/core-mis.mkiv
+++ b/tex/context/base/core-mis.mkiv
@@ -745,23 +745,6 @@
\unexpanded\def\stopsubsentence {\endofsubsentencespacing\prewordbreak\endofsubsentence}
\unexpanded\def\subsentence {\groupedcommand\startsubsentence\stopsubsentence}
-%D \defineXMLenvironment [subsentence]
-%D {|<|}
-%D {|>|}
-%D \defineXMLenvironment [subsentence]
-%D {\directdiscretionary{<}}
-%D {\directdiscretionary{>}}
-%D \defineXMLenvironment [subsentence]
-%D {\startsubsentence}
-%D {\stopsubsentence}
-%D
-%D \startbuffer
-%D test <subsentence>test</subsentence> test
-%D \stopbuffer
-%D
-%D \typebuffer
-%D \processXMLbuffer
-
\enableactivediscretionaries
\definehspace [quotation] [\zeropoint]
diff --git a/tex/context/base/core-two.lua b/tex/context/base/core-two.lua
index 67bf2fc03..c1a55b9f9 100644
--- a/tex/context/base/core-two.lua
+++ b/tex/context/base/core-two.lua
@@ -7,8 +7,10 @@ if not modules then modules = { } end modules ['core-two'] = {
}
local remove, concat = table.remove, table.concat
-
local texprint = tex.print
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+
+local collected, tobesaved = allocate(), allocate()
--[[ldx--
<p>We save multi-pass information in the main utility table. This is a
@@ -16,19 +18,18 @@ bit of a mess because we support old and new methods.</p>
--ldx]]--
local jobpasses = {
- collected = { },
- tobesaved = { },
+ collected = collected,
+ tobesaved = tobesaved,
}
job.passes = jobpasses
-local collected, tobesaved = jobpasses.collected, jobpasses.tobesaved
-
local function initializer()
- collected, tobesaved = jobpasses.collected, jobpasses.tobesaved
+ collected = mark(jobpasses.collected)
+ tobesaved = mark(jobpasses.tobesaved)
end
-job.register('job.passes.collected', jobpasses.tobesaved, initializer, nil)
+job.register('job.passes.collected', tobesaved, initializer, nil)
local function allocate(id)
local p = tobesaved[id]
diff --git a/tex/context/base/core-uti.lua b/tex/context/base/core-uti.lua
index c8dc0f73d..1681646df 100644
--- a/tex/context/base/core-uti.lua
+++ b/tex/context/base/core-uti.lua
@@ -22,15 +22,18 @@ local next, type, tostring = next, type, tostring
local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
local definetable, accesstable = utilities.tables.definetable, utilities.tables.accesstable
local serialize = table.serialize
+local packers = utilities.packers
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local report_jobcontrol = logs.new("jobcontrol")
-if not jobs then jobs = { } end
-if not job then jobs['main'] = { } end job = jobs['main']
+job = job or { }
+local job = job
-local packers = utilities.packers
+job.version = 1.14
-jobs.version = 1.14
+-- some day we will implement loading of other jobs and then we need
+-- job.jobs
--[[ldx--
<p>Variables are saved using in the previously defined table and passed
@@ -44,7 +47,7 @@ function job.comment(str)
comment[#comment+1] = str
end
-job.comment(format("version: %1.2f",jobs.version))
+job.comment(format("version: %1.2f",job.version))
function job.initialize(loadname,savename)
job.load(loadname) -- has to come after structure is defined !
@@ -61,21 +64,26 @@ end
-- as an example we implement variables
+local tobesaved, collected, checksums = allocate(), allocate(), allocate()
+
local jobvariables = {
- collected = { },
- tobesaved = { },
- checksums = { },
+ collected = collected,
+ tobesaved = tobesaved,
+ checksums = checksums,
}
job.variables = jobvariables
-if not jobvariables.checksums.old then jobvariables.checksums.old = md5.HEX("old") end -- used in experiment
-if not jobvariables.checksums.new then jobvariables.checksums.new = md5.HEX("new") end -- used in experiment
+if not checksums.old then checksums.old = md5.HEX("old") end -- used in experiment
+if not checksums.new then checksums.new = md5.HEX("new") end -- used in experiment
-job.register('job.variables.checksums', jobvariables.checksums)
+job.register('job.variables.checksums', checksums)
local function initializer()
- local r = jobvariables.collected.randomseed
+ tobesaved = mark(jobvariables.tobesaved)
+ collected = mark(jobvariables.collected)
+ checksums = mark(jobvariables.checksums)
+ local r = collected.randomseed
if not r then
r = math.random()
math.setrandomseedi(r,"initialize")
@@ -84,16 +92,16 @@ local function initializer()
math.setrandomseedi(r,"previous run")
report_jobcontrol("resuming randomizer with %s",r)
end
- jobvariables.tobesaved.randomseed = r
- for cs, value in next, jobvariables.collected do
+ tobesaved.randomseed = r
+ for cs, value in next, collected do
texsprint(ctxcatcodes,format("\\xdef\\%s{%s}",cs,value))
end
end
-job.register('job.variables.collected', jobvariables.tobesaved, initializer)
+job.register('job.variables.collected', tobesaved, initializer)
function jobvariables.save(cs,value)
- jobvariables.tobesaved[cs] = value
+ tobesaved[cs] = value
end
local packlist = {
@@ -149,8 +157,8 @@ function job.load(filename)
local data = io.loaddata(filename)
if data and data ~= "" then
local version = tonumber(match(data,"^-- version: ([%d%.]+)"))
- if version ~= jobs.version then
- report_jobcontrol("version mismatch with jobfile: %s <> %s", version or "?", jobs.version)
+ if version ~= job.version then
+ report_jobcontrol("version mismatch with jobfile: %s <> %s", version or "?", job.version)
else
local data = loadstring(data)
if data then
diff --git a/tex/context/base/core-var.mkiv b/tex/context/base/core-var.mkiv
index 62cc9fc50..60b15437e 100644
--- a/tex/context/base/core-var.mkiv
+++ b/tex/context/base/core-var.mkiv
@@ -139,21 +139,6 @@
\unexpanded\def\stoptextproperties {\the\everystoptextproperties}
%D \macros
-%D {defineinputmode,setinputmode}
-%D
-%D New. Some work needs to be done.
-
-% not in mkiv
-
-\unexpanded\def\defineinputmode[#1]{\@EA\newtoks\csname every#1inputmode\endcsname}
-\def\setinputmode [#1]{\the\executeifdefined{every#1inputmode}\emptytoks}
-
-\defineinputmode [TEX]
-\defineinputmode [XML]
-
-\setinputmode [TEX]
-
-%D \macros
%D {trialtypesetting}
%D
%D We disable trial typesetting in the output routine,
@@ -179,7 +164,7 @@
%D
%D We need this one even if no \XML\ is supported.
-\newif\ifprocessingXML % old way
+% \newif\ifprocessingXML % old way
%D \macros
%D {ifproductionrun}
diff --git a/tex/context/base/data-aux.lua b/tex/context/base/data-aux.lua
index 943bf0a52..0a80e04ce 100644
--- a/tex/context/base/data-aux.lua
+++ b/tex/context/base/data-aux.lua
@@ -15,21 +15,21 @@ local resolvers = resolvers
local report_resolvers = logs.new("resolvers")
-function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix
+function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per se a suffix
local scriptpath = "scripts/context/lua"
newname = file.addsuffix(newname,"lua")
- local oldscript = resolvers.clean_path(oldname)
+ local oldscript = resolvers.cleanpath(oldname)
if trace_locating then
report_resolvers("to be replaced old script %s", oldscript)
end
- local newscripts = resolvers.find_files(newname) or { }
+ local newscripts = resolvers.findfiles(newname) or { }
if #newscripts == 0 then
if trace_locating then
report_resolvers("unable to locate new script")
end
else
for i=1,#newscripts do
- local newscript = resolvers.clean_path(newscripts[i])
+ local newscript = resolvers.cleanpath(newscripts[i])
if trace_locating then
report_resolvers("checking new script %s", newscript)
end
diff --git a/tex/context/base/data-con.lua b/tex/context/base/data-con.lua
index 05f1b07de..5d9650f8e 100644
--- a/tex/context/base/data-con.lua
+++ b/tex/context/base/data-con.lua
@@ -50,7 +50,8 @@ local mt = {
t.readables = readables
return readables
end
- end
+ end,
+ __storage__ = true
}
function containers.define(category, subcategory, version, enabled)
diff --git a/tex/context/base/data-ctx.lua b/tex/context/base/data-ctx.lua
index 30f974131..1bb3f9e71 100644
--- a/tex/context/base/data-ctx.lua
+++ b/tex/context/base/data-ctx.lua
@@ -12,7 +12,7 @@ local report_resolvers = logs.new("resolvers")
local resolvers = resolvers
-function resolvers.save_used_files_in_trees()
+local function saveusedfilesintrees()
local jobname = environment.jobname
if not jobname or jobname == "" then jobname = "luatex" end
local filename = file.replacesuffix(jobname,'jlg')
@@ -40,4 +40,4 @@ function resolvers.save_used_files_in_trees()
end
end
-directives.register("system.dumpfiles", function() resolvers.save_used_files_in_trees() end)
+directives.register("system.dumpfiles", function() saveusedfilesintrees() end)
diff --git a/tex/context/base/data-env.lua b/tex/context/base/data-env.lua
index d1c110e80..be596f3bf 100644
--- a/tex/context/base/data-env.lua
+++ b/tex/context/base/data-env.lua
@@ -6,13 +6,15 @@ if not modules then modules = { } end modules ['data-env'] = {
license = "see context related readme files",
}
+local allocate = utilities.storage.allocate
+
local resolvers = resolvers
-local formats = { } resolvers.formats = formats
-local suffixes = { } resolvers.suffixes = suffixes
-local dangerous = { } resolvers.dangerous = dangerous
-local suffixmap = { } resolvers.suffixmap = suffixmap
-local alternatives = { } resolvers.alternatives = alternatives
+local formats = allocate() resolvers.formats = formats
+local suffixes = allocate() resolvers.suffixes = suffixes
+local dangerous = allocate() resolvers.dangerous = dangerous
+local suffixmap = allocate() resolvers.suffixmap = suffixmap
+local alternatives = allocate() resolvers.alternatives = alternatives
formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' }
formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' }
@@ -95,12 +97,12 @@ alternatives['subfont definition files'] = 'sfd'
-- A few accessors, mostly for command line tool.
-function resolvers.suffix_of_format(str)
+function resolvers.suffixofformat(str)
local s = suffixes[str]
return s and s[1] or ""
end
-function resolvers.suffixes_of_format(str)
+function resolvers.suffixesofformat(str)
return suffixes[str] or { }
end
@@ -113,13 +115,15 @@ for name, suffixlist in next, suffixes do
end
end
-setmetatable(suffixes, { __newindex = function(suffixes,name,suffixlist)
+local mt = getmetatable(suffixes)
+
+mt.__newindex = function(suffixes,name,suffixlist)
rawset(suffixes,name,suffixlist)
suffixes[name] = suffixlist
for i=1,#suffixlist do
suffixmap[suffixlist[i]] = name
end
-end } )
+end
for name, format in next, formats do
dangerous[name] = true
@@ -135,19 +139,19 @@ dangerous.tex = nil
-- more helpers
-function resolvers.format_of_var(str)
+function resolvers.formatofvariable(str)
return formats[str] or formats[alternatives[str]] or ''
end
-function resolvers.format_of_suffix(str) -- of file
+function resolvers.formatofsuffix(str) -- of file
return suffixmap[file.extname(str)] or 'tex'
end
-function resolvers.variable_of_format(str)
+function resolvers.variableofformat(str)
return formats[str] or formats[alternatives[str]] or ''
end
-function resolvers.var_of_format_or_suffix(str)
+function resolvers.variableofformatorsuffix(str)
local v = formats[str]
if v then
return v
diff --git a/tex/context/base/data-exp.lua b/tex/context/base/data-exp.lua
index fb7e48efd..6d15a1cd7 100644
--- a/tex/context/base/data-exp.lua
+++ b/tex/context/base/data-exp.lua
@@ -135,9 +135,9 @@ local function validate(s)
return s ~= "" and not find(s,dummy_path_expr) and s
end
-resolvers.validated_path = validate -- keeps the trailing //
+resolvers.validatedpath = validate -- keeps the trailing //
-function resolvers.expanded_path_from_list(pathlist) -- maybe not a list, just a path
+function resolvers.expandedpathfromlist(pathlist) -- maybe not a list, just a path
-- a previous version fed back into pathlist
local newlist, ok = { }, false
for k=1,#pathlist do
@@ -172,7 +172,7 @@ cleanup = lpeg.replacer {
{ "~" , function() return lpegmatch(cleanup,environment.homedir) end },
}
-function resolvers.clean_path(str)
+function resolvers.cleanpath(str)
return str and lpegmatch(cleanup,str)
end
@@ -193,7 +193,7 @@ local stripper = lpegCs(
lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
)
-function resolvers.checked_variable(str) -- assumes str is a string
+function resolvers.checkedvariable(str) -- assumes str is a string
return lpegmatch(stripper,str) or str
end
@@ -209,7 +209,7 @@ local cache = { }
local splitter = lpegCt(lpeg.splitat(lpegS(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local function split_configuration_path(str) -- beware, this can be either a path or a { specification }
+local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification }
if str then
local found = cache[str]
if not found then
@@ -238,19 +238,19 @@ local function split_configuration_path(str) -- beware, this can be either a pat
end
end
-resolvers.split_configuration_path = split_configuration_path
+resolvers.splitconfigurationpath = splitconfigurationpath
-function resolvers.split_path(str)
+function resolvers.splitpath(str)
if type(str) == 'table' then
return str
else
- return split_configuration_path(str)
+ return splitconfigurationpath(str)
end
end
-function resolvers.join_path(str)
+function resolvers.joinpath(str)
if type(str) == 'table' then
- return file.join_path(str)
+ return file.joinpath(str)
else
return str
end
@@ -280,7 +280,7 @@ end
local weird = lpegP(".")^1 + lpeg.anywhere(lpegS("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-function resolvers.scan_files(specification)
+function resolvers.scanfiles(specification)
if trace_locating then
report_resolvers("scanning path '%s'",specification)
end
@@ -335,4 +335,4 @@ function resolvers.scan_files(specification)
return files
end
---~ print(table.serialize(resolvers.scan_files("t:/sources")))
+--~ print(table.serialize(resolvers.scanfiles("t:/sources")))
diff --git a/tex/context/base/data-ini.lua b/tex/context/base/data-ini.lua
index 63329b6e2..9550c1e78 100644
--- a/tex/context/base/data-ini.lua
+++ b/tex/context/base/data-ini.lua
@@ -37,14 +37,8 @@ kpse = { original = kpse }
setmetatable(kpse, {
__index = function(kp,name)
- local r = resolvers[name]
- if not r then
- r = function (...)
- report_resolvers("not supported: %s(%s)",name,concat(...))
- end
- rawset(kp,name,r)
- end
- return r
+ report_resolvers("fatal error: kpse library is accessed (key: %s)",name)
+ os.exit()
end
} )
diff --git a/tex/context/base/data-inp.lua b/tex/context/base/data-inp.lua
index 45a348b7d..b3e30a6c6 100644
--- a/tex/context/base/data-inp.lua
+++ b/tex/context/base/data-inp.lua
@@ -6,12 +6,10 @@ if not modules then modules = { } end modules ['data-inp'] = {
license = "see context related readme files"
}
-local resolvers = resolvers
+local allocate = utilities.storage.allocate
-resolvers.finders = resolvers.finders or { }
-resolvers.openers = resolvers.openers or { }
-resolvers.loaders = resolvers.loaders or { }
+local resolvers = resolvers
-resolvers.finders.notfound = { nil }
-resolvers.openers.notfound = { nil }
-resolvers.loaders.notfound = { false, nil, 0 }
+resolvers.finders = allocate { notfound = { nil } }
+resolvers.openers = allocate { notfound = { nil } }
+resolvers.loaders = allocate { notfound = { false, nil, 0 } }
diff --git a/tex/context/base/data-lua.lua b/tex/context/base/data-lua.lua
index f163361fe..fc44e5508 100644
--- a/tex/context/base/data-lua.lua
+++ b/tex/context/base/data-lua.lua
@@ -27,7 +27,7 @@ local _path_, libpaths, _cpath_, clibpaths
function package.libpaths()
if not _path_ or package.path ~= _path_ then
_path_ = package.path
- libpaths = file.split_path(_path_,";")
+ libpaths = file.splitpath(_path_,";")
end
return libpaths
end
@@ -35,7 +35,7 @@ end
function package.clibpaths()
if not _cpath_ or package.cpath ~= _cpath_ then
_cpath_ = package.cpath
- clibpaths = file.split_path(_cpath_,";")
+ clibpaths = file.splitpath(_cpath_,";")
end
return clibpaths
end
@@ -84,7 +84,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
end
for i=1,#libformats do
local format = libformats[i]
- local resolved = resolvers.find_file(name,format) or ""
+ local resolved = resolvers.findfile(name,format) or ""
if trace_locating then -- mode detail
report_resolvers("! checking for '%s' using 'libformat path': '%s'",name,format)
end
@@ -108,7 +108,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
for i=1,#clibformats do
-- better have a dedicated loop
local format = clibformats[i]
- local paths = resolvers.expanded_path_list_from_var(format)
+ local paths = resolvers.expandedpathlistfromvariable(format)
for p=1,#paths do
local path = paths[p]
local resolved = file.join(path,libname)
@@ -140,7 +140,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_loading then -- more detail
report_resolvers("! checking for '%s' using 'luatexlibs': '%s'",name)
end
- local resolved = resolvers.find_file(file.basename(name),'luatexlibs') or ""
+ local resolved = resolvers.findfile(file.basename(name),'luatexlibs') or ""
if resolved ~= "" then
if trace_locating then
report_resolvers("! lib '%s' located by basename via environment: '%s'",name,resolved)
diff --git a/tex/context/base/data-met.lua b/tex/context/base/data-met.lua
index fb9b4d923..06c810fc4 100644
--- a/tex/context/base/data-met.lua
+++ b/tex/context/base/data-met.lua
@@ -12,11 +12,14 @@ local trace_locating = false trackers.register("resolvers.locating", functi
local report_resolvers = logs.new("resolvers")
+local allocate = utilities.storage.allocate
+
local resolvers = resolvers
-resolvers.locators = { notfound = { nil } } -- locate databases
-resolvers.hashers = { notfound = { nil } } -- load databases
-resolvers.generators = { notfound = { nil } } -- generate databases
+resolvers.concatinators = allocate ()
+resolvers.locators = allocate { notfound = { nil } } -- locate databases
+resolvers.hashers = allocate { notfound = { nil } } -- load databases
+resolvers.generators = allocate { notfound = { nil } } -- generate databases
function resolvers.splitmethod(filename)
if not filename then
diff --git a/tex/context/base/data-pre.lua b/tex/context/base/data-pre.lua
index 94992c102..fdf304b73 100644
--- a/tex/context/base/data-pre.lua
+++ b/tex/context/base/data-pre.lua
@@ -17,7 +17,7 @@ local prefixes = { }
local getenv = resolvers.getenv
prefixes.environment = function(str) -- getenv is case insensitive anyway
- return resolvers.clean_path(getenv(str) or getenv(upper(str)) or getenv(lower(str)) or "")
+ return resolvers.cleanpath(getenv(str) or getenv(upper(str)) or getenv(lower(str)) or "")
end
prefixes.relative = function(str,n)
@@ -36,7 +36,7 @@ prefixes.relative = function(str,n)
end
end
end
- return resolvers.clean_path(str)
+ return resolvers.cleanpath(str)
end
prefixes.auto = function(str)
@@ -48,18 +48,18 @@ prefixes.auto = function(str)
end
prefixes.locate = function(str)
- local fullname = resolvers.find_given_file(str) or ""
- return resolvers.clean_path((fullname ~= "" and fullname) or str)
+ local fullname = resolvers.findgivenfile(str) or ""
+ return resolvers.cleanpath((fullname ~= "" and fullname) or str)
end
prefixes.filename = function(str)
- local fullname = resolvers.find_given_file(str) or ""
- return resolvers.clean_path(file.basename((fullname ~= "" and fullname) or str))
+ local fullname = resolvers.findgivenfile(str) or ""
+ return resolvers.cleanpath(file.basename((fullname ~= "" and fullname) or str))
end
prefixes.pathname = function(str)
- local fullname = resolvers.find_given_file(str) or ""
- return resolvers.clean_path(file.dirname((fullname ~= "" and fullname) or str))
+ local fullname = resolvers.findgivenfile(str) or ""
+ return resolvers.cleanpath(file.dirname((fullname ~= "" and fullname) or str))
end
prefixes.env = prefixes.environment
diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua
index a9582262f..f7dd5cd2e 100644
--- a/tex/context/base/data-res.lua
+++ b/tex/context/base/data-res.lua
@@ -24,6 +24,7 @@ local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
local collapse_path = file.collapse_path
+local allocate = utilities.storage.allocate
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
@@ -33,9 +34,9 @@ local report_resolvers = logs.new("resolvers")
local resolvers = resolvers
-local expanded_path_from_list = resolvers.expanded_path_from_list
-local checked_variable = resolvers.checked_variable
-local split_configuration_path = resolvers.split_configuration_path
+local expandedpathfromlist = resolvers.expandedpathfromlist
+local checkedvariable = resolvers.checkedvariable
+local splitconfigurationpath = resolvers.splitconfigurationpath
local initializesetter = utilities.setters.initialize
@@ -44,7 +45,7 @@ local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.s
resolvers.cacheversion = '1.0.1'
resolvers.configbanner = ''
resolvers.homedir = environment.homedir
-resolvers.criticalvars = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
+resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}' -- rubish path
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
@@ -66,18 +67,20 @@ function resolvers.newinstance()
progname = 'context',
engine = 'luatex',
format = '',
- environment = { },
- variables = { },
- expansions = { },
- files = { },
- setups = { },
- order = { },
- found = { },
- foundintrees = { },
- origins = { },
- hashes = { },
- specification = { },
- lists = { },
+ environment = allocate(),
+ variables = allocate(),
+ expansions = allocate(),
+ files = allocate(),
+ setups = allocate(),
+ order = allocate(),
+ found = allocate(),
+ foundintrees = allocate(),
+ origins = allocate(),
+ hashes = allocate(),
+ specification = allocate(),
+ lists = allocate(),
+ data = allocate(), -- only for loading
+ fakepaths = allocate(),
remember = true,
diskcache = true,
renewcache = false,
@@ -85,15 +88,13 @@ function resolvers.newinstance()
savelists = true,
allresults = false,
pattern = nil, -- lists
- data = { }, -- only for loading
force_suffixes = true,
- fakepaths = { },
}
local ne = newinstance.environment
for k, v in next, osenv do
- ne[upper(k)] = checked_variable(v)
+ ne[upper(k)] = checkedvariable(v)
end
return newinstance
@@ -128,13 +129,13 @@ function resolvers.getenv(key)
return value
else
local e = osgetenv(key)
- return e ~= nil and e ~= "" and checked_variable(e) or ""
+ return e ~= nil and e ~= "" and checkedvariable(e) or ""
end
end
resolvers.env = resolvers.getenv
-local function expand_vars(lst) -- simple vars
+local function expandvars(lst) -- simple vars
local variables, getenv = instance.variables, resolvers.getenv
local function resolve(a)
local va = variables[a] or ""
@@ -160,10 +161,10 @@ local function resolve(key)
return value
end
local e = osgetenv(key)
- return e ~= nil and e ~= "" and checked_variable(e) or ""
+ return e ~= nil and e ~= "" and checkedvariable(e) or ""
end
-local function expanded_var(var) -- simple vars
+local function expandedvariable(var) -- simple vars
var = gsub(var,"%$([%a%d%_%-]+)",resolve)
var = gsub(var,";+",";")
var = gsub(var,";[!{}/\\]+;",";")
@@ -181,7 +182,7 @@ local function entry(entries,name)
result = resolvers.getenv(name)
if result then
instance.variables[name] = result
- resolvers.expand_variables()
+ resolvers.expandvariables()
return instance.expansions[name] or ""
end
end
@@ -198,7 +199,7 @@ local function is_entry(entries,name)
end
end
-function resolvers.report_critical_variables()
+local function reportcriticalvariables()
if trace_locating then
for i=1,#resolvers.criticalvars do
local v = resolvers.criticalvars[i]
@@ -206,7 +207,7 @@ function resolvers.report_critical_variables()
end
report_resolvers()
end
- resolvers.report_critical_variables = function() end
+ reportcriticalvariables = function() end
end
local function identify_configuration_files()
@@ -219,10 +220,10 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- resolvers.report_critical_variables()
- resolvers.expand_variables()
- local cnfpaths = expanded_path_from_list(resolvers.split_path(cnfspec))
- expand_vars(cnfpaths) --- hm
+ reportcriticalvariables()
+ resolvers.expandvariables()
+ local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
+ expandvars(cnfpaths) --- hm
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
local filename = collapse_path(filejoin(cnfpaths[i],luacnfname))
@@ -327,7 +328,7 @@ local function collapse_configuration_data() -- potential optimization: pass sta
if ek and ek ~= "" then
variables[k], origins[k] = ek, "env"
else
- local bv = checked_variable(v)
+ local bv = checkedvariable(v)
variables[k], origins[k] = bv, "cnf"
end
end
@@ -348,7 +349,7 @@ function resolvers.locators.tex(specification)
if trace_locating then
report_resolvers("tex locator '%s' found",specification)
end
- resolvers.append_hash('file',specification,filename,true) -- cache
+ resolvers.appendhash('file',specification,filename,true) -- cache
elseif trace_locating then
report_resolvers("tex locator '%s' not found",specification)
end
@@ -361,7 +362,7 @@ function resolvers.hashdatabase(tag,name)
end
local function load_file_databases()
- instance.loaderror, instance.files = false, { }
+ instance.loaderror, instance.files = false, allocate()
if not instance.renewcache then
local hashes = instance.hashes
for k=1,#hashes do
@@ -384,12 +385,12 @@ end
local function locate_file_databases()
-- todo: cache:// and tree:// (runtime)
- local texmfpaths = resolvers.expanded_path_list('TEXMF')
+ local texmfpaths = resolvers.expandedpathlist('TEXMF')
for i=1,#texmfpaths do
local path = collapse_path(texmfpaths[i])
local stripped = gsub(path,"^!!","")
local runtime = stripped == path
- path = resolvers.clean_path(path)
+ path = resolvers.cleanpath(path)
if stripped ~= "" then
if lfs.isdir(path) then
local spec = resolvers.splitmethod(stripped)
@@ -462,23 +463,23 @@ local function load_databases()
end
end
-function resolvers.append_hash(type,tag,name,cache)
+function resolvers.appendhash(type,tag,name,cache)
if trace_locating then
report_resolvers("hash '%s' appended",tag)
end
insert(instance.hashes, { type = type, tag = tag, name = name, cache = cache } )
end
-function resolvers.prepend_hash(type,tag,name,cache)
+function resolvers.prependhash(type,tag,name,cache)
if trace_locating then
report_resolvers("hash '%s' prepended",tag)
end
insert(instance.hashes, 1, { type = type, tag = tag, name = name, cache = cache } )
end
-function resolvers.extend_texmf_var(specification) -- crap, we could better prepend the hash
--- local t = resolvers.expanded_path_list('TEXMF') -- full expansion
- local t = resolvers.split_path(resolvers.getenv('TEXMF'))
+function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
+-- local t = resolvers.expandedpathlist('TEXMF') -- full expansion
+ local t = resolvers.splitpath(resolvers.getenv('TEXMF'))
insert(t,1,specification)
local newspec = concat(t,";")
if instance.environment["TEXMF"] then
@@ -488,18 +489,18 @@ function resolvers.extend_texmf_var(specification) -- crap, we could better prep
else
-- weird
end
- resolvers.expand_variables()
+ resolvers.expandvariables()
reset_hashes()
end
function resolvers.generators.tex(specification,tag)
- instance.files[tag or specification] = resolvers.scan_files(specification)
+ instance.files[tag or specification] = resolvers.scanfiles(specification)
end
function resolvers.splitexpansions()
local ie = instance.expansions
for k,v in next, ie do
- local t, h, p = { }, { }, split_configuration_path(v)
+ local t, h, p = { }, { }, splitconfigurationpath(v)
for kk=1,#p do
local vv = p[kk]
if vv ~= "" and not h[vv] then
@@ -520,12 +521,12 @@ end
-- we used to have 'files' and 'configurations' so therefore the following
-- shared function
-function resolvers.data_state()
+function resolvers.datastate()
return caches.contentstate()
end
-function resolvers.expand_variables()
- local expansions, environment, variables = { }, instance.environment, instance.variables
+function resolvers.expandvariables()
+ local expansions, environment, variables = allocate(), instance.environment, instance.variables
local getenv = resolvers.getenv
instance.expansions = expansions
local engine, progname = instance.engine, instance.progname
@@ -586,19 +587,19 @@ function resolvers.is_expansion(name)
return is_entry(instance.expansions,name)
end
-function resolvers.unexpanded_path_list(str)
+function resolvers.unexpandedpathlist(str)
local pth = resolvers.variable(str)
- local lst = resolvers.split_path(pth)
- return expanded_path_from_list(lst)
+ local lst = resolvers.splitpath(pth)
+ return expandedpathfromlist(lst)
end
-function resolvers.unexpanded_path(str)
- return file.join_path(resolvers.unexpanded_path_list(str))
+function resolvers.unexpandedpath(str)
+ return file.joinpath(resolvers.unexpandedpathlist(str))
end
local done = { }
-function resolvers.reset_extra_path()
+function resolvers.resetextrapath()
local ep = instance.extra_paths
if not ep then
ep, done = { }, { }
@@ -608,7 +609,7 @@ function resolvers.reset_extra_path()
end
end
-function resolvers.register_extra_path(paths,subpaths)
+function resolvers.registerextrapath(paths,subpaths)
local ep = instance.extra_paths or { }
local n = #ep
if paths and paths ~= "" then
@@ -618,7 +619,7 @@ function resolvers.register_extra_path(paths,subpaths)
for s in gmatch(subpaths,"[^,]+") do
local ps = p .. "/" .. s
if not done[ps] then
- ep[#ep+1] = resolvers.clean_path(ps)
+ ep[#ep+1] = resolvers.cleanpath(ps)
done[ps] = true
end
end
@@ -626,7 +627,7 @@ function resolvers.register_extra_path(paths,subpaths)
else
for p in gmatch(paths,"[^,]+") do
if not done[p] then
- ep[#ep+1] = resolvers.clean_path(p)
+ ep[#ep+1] = resolvers.cleanpath(p)
done[p] = true
end
end
@@ -637,7 +638,7 @@ function resolvers.register_extra_path(paths,subpaths)
for s in gmatch(subpaths,"[^,]+") do
local ps = ep[i] .. "/" .. s
if not done[ps] then
- ep[#ep+1] = resolvers.clean_path(ps)
+ ep[#ep+1] = resolvers.cleanpath(ps)
done[ps] = true
end
end
@@ -689,54 +690,54 @@ local function made_list(instance,list)
end
end
-function resolvers.clean_path_list(str)
- local t = resolvers.expanded_path_list(str)
+function resolvers.cleanpathlist(str)
+ local t = resolvers.expandedpathlist(str)
if t then
for i=1,#t do
- t[i] = collapse_path(resolvers.clean_path(t[i]))
+ t[i] = collapse_path(resolvers.cleanpath(t[i]))
end
end
return t
end
-function resolvers.expand_path(str)
- return file.join_path(resolvers.expanded_path_list(str))
+function resolvers.expandpath(str)
+ return file.joinpath(resolvers.expandedpathlist(str))
end
-function resolvers.expanded_path_list(str)
+function resolvers.expandedpathlist(str)
if not str then
return ep or { } -- ep ?
elseif instance.savelists then
-- engine+progname hash
str = gsub(str,"%$","")
if not instance.lists[str] then -- cached
- local lst = made_list(instance,resolvers.split_path(resolvers.expansion(str)))
- instance.lists[str] = expanded_path_from_list(lst)
+ local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ instance.lists[str] = expandedpathfromlist(lst)
end
return instance.lists[str]
else
- local lst = resolvers.split_path(resolvers.expansion(str))
- return made_list(instance,expanded_path_from_list(lst))
+ local lst = resolvers.splitpath(resolvers.expansion(str))
+ return made_list(instance,expandedpathfromlist(lst))
end
end
-function resolvers.expanded_path_list_from_var(str) -- brrr
- local tmp = resolvers.var_of_format_or_suffix(gsub(str,"%$",""))
+function resolvers.expandedpathlistfromvariable(str) -- brrr
+ local tmp = resolvers.variableofformatorsuffix(gsub(str,"%$",""))
if tmp ~= "" then
- return resolvers.expanded_path_list(tmp)
+ return resolvers.expandedpathlist(tmp)
else
- return resolvers.expanded_path_list(str)
+ return resolvers.expandedpathlist(str)
end
end
-function resolvers.expand_path_from_var(str)
- return file.join_path(resolvers.expanded_path_list_from_var(str))
+function resolvers.expandpathfromvariable(str)
+ return file.joinpath(resolvers.expandedpathlistfromvariable(str))
end
-function resolvers.expand_braces(str) -- output variable and brace expansion of STRING
+function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
local ori = resolvers.variable(str)
- local pth = expanded_path_from_list(resolvers.split_path(ori))
- return file.join_path(pth)
+ local pth = expandedpathfromlist(resolvers.splitpath(ori))
+ return file.joinpath(pth)
end
resolvers.isreadable = { }
@@ -825,7 +826,7 @@ local function collect_files(names)
return #filelist > 0 and filelist or nil
end
-function resolvers.register_in_trees(name)
+function resolvers.registerintrees(name)
if not find(name,"^%.") then
instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
end
@@ -856,7 +857,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if trace_locating then
report_resolvers("remembering file '%s'",filename)
end
- resolvers.register_in_trees(filename) -- for tracing used files
+ resolvers.registerintrees(filename) -- for tracing used files
return instance.found[stamp]
end
end
@@ -873,7 +874,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if trace_locating then
report_resolvers("checking wildcard '%s'", filename)
end
- result = resolvers.find_wildcard_files(filename)
+ result = resolvers.findwildcardfiles(filename)
elseif file.is_qualified_path(filename) then
if resolvers.isreadable.file(filename) then
if trace_locating then
@@ -916,7 +917,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
local savedformat = instance.format
local format = savedformat or ""
if format == "" then
- instance.format = resolvers.format_of_suffix(suffix)
+ instance.format = resolvers.formatofsuffix(suffix)
end
if not format then
instance.format = "othertextfiles" -- kind of everything, maybe texinput is better
@@ -973,12 +974,12 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if ext == "" or not suffixmap[ext] then
local forcedname = filename .. '.tex'
wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.format_of_suffix(forcedname)
+ filetype = resolvers.formatofsuffix(forcedname)
if trace_locating then
report_resolvers("forcing filetype '%s'",filetype)
end
else
- filetype = resolvers.format_of_suffix(filename)
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
report_resolvers("using suffix based filetype '%s'",filetype)
end
@@ -997,8 +998,8 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
report_resolvers("using given filetype '%s'",filetype)
end
end
- local typespec = resolvers.variable_of_format(filetype)
- local pathlist = resolvers.expanded_path_list(typespec)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
if not pathlist or #pathlist == 0 then
-- no pathlist, access check only / todo == wildcard
if trace_detail then
@@ -1113,7 +1114,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
for k=1,#result do
local rk = collapse_path(result[k])
result[k] = rk
- resolvers.register_in_trees(rk) -- for tracing used files
+ resolvers.registerintrees(rk) -- for tracing used files
end
if instance.remember then
instance.found[stamp] = result
@@ -1121,12 +1122,10 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
return result
end
-if not resolvers.concatinators then resolvers.concatinators = { } end
-
resolvers.concatinators.tex = filejoin
resolvers.concatinators.file = resolvers.concatinators.tex
-function resolvers.find_files(filename,filetype,mustexist)
+function resolvers.findfiles(filename,filetype,mustexist)
if type(mustexist) == boolean then
-- all set
elseif type(filetype) == 'boolean' then
@@ -1146,17 +1145,17 @@ function resolvers.find_files(filename,filetype,mustexist)
return result
end
-function resolvers.find_file(filename,filetype,mustexist)
- return (resolvers.find_files(filename,filetype,mustexist)[1] or "")
+function resolvers.findfile(filename,filetype,mustexist)
+ return (resolvers.findfiles(filename,filetype,mustexist)[1] or "")
end
-function resolvers.find_path(filename,filetype)
- local path = resolvers.find_files(filename,filetype)[1] or ""
+function resolvers.findpath(filename,filetype)
+ local path = resolvers.findfiles(filename,filetype)[1] or ""
-- todo return current path
return file.dirname(path)
end
-function resolvers.find_given_files(filename)
+function resolvers.findgivenfiles(filename)
local bname, result = filebasename(filename), { }
local hashes = instance.hashes
for k=1,#hashes do
@@ -1187,8 +1186,8 @@ function resolvers.find_given_files(filename)
return result
end
-function resolvers.find_given_file(filename)
- return (resolvers.find_given_files(filename)[1] or "")
+function resolvers.findgivenfile(filename)
+ return (resolvers.findgivenfiles(filename)[1] or "")
end
local function doit(path,blist,bname,tag,kind,result,allresults)
@@ -1214,7 +1213,7 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
return done
end
-function resolvers.find_wildcard_files(filename) -- todo: remap: and lpeg
+function resolvers.findwildcardfiles(filename) -- todo: remap: and lpeg
local result = { }
local bname, dname = filebasename(filename), filedirname(filename)
local path = gsub(dname,"^*/","")
@@ -1257,8 +1256,8 @@ function resolvers.find_wildcard_files(filename) -- todo: remap: and lpeg
return result
end
-function resolvers.find_wildcard_file(filename)
- return (resolvers.find_wildcard_files(filename)[1] or "")
+function resolvers.findwildcardfile(filename)
+ return (resolvers.findwildcardfiles(filename)[1] or "")
end
-- main user functions
@@ -1272,7 +1271,7 @@ function resolvers.load(option)
identify_configuration_files()
load_configuration_files()
collapse_configuration_data()
- resolvers.expand_variables()
+ resolvers.expandvariables()
if option ~= "nofiles" then
load_databases()
resolvers.automount()
@@ -1282,15 +1281,16 @@ function resolvers.load(option)
return files and next(files) and true
end
-function resolvers.for_files(command, files, filetype, mustexist)
+local function report(str)
+ if trace_locating then
+ report_resolvers(str) -- has already verbose
+ else
+ print(str)
+ end
+end
+
+function resolvers.dowithfilesandreport(command, files, filetype, mustexist)
if files and #files > 0 then
- local function report(str)
- if trace_locating then
- report_resolvers(str) -- has already verbose
- else
- print(str)
- end
- end
if trace_locating then
report('') -- ?
end
@@ -1308,21 +1308,21 @@ function resolvers.for_files(command, files, filetype, mustexist)
end
end
--- strtab
+-- obsolete
-resolvers.var_value = resolvers.variable -- output the value of variable $STRING.
-resolvers.expand_var = resolvers.expansion -- output variable expansion of STRING.
+-- resolvers.varvalue = resolvers.variable -- output the value of variable $STRING.
+-- resolvers.expandvar = resolvers.expansion -- output variable expansion of STRING.
-function resolvers.show_path(str) -- output search path for file type NAME
- return file.join_path(resolvers.expanded_path_list(resolvers.format_of_var(str)))
+function resolvers.showpath(str) -- output search path for file type NAME
+ return file.joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
end
--- resolvers.find_file(filename)
--- resolvers.find_file(filename, filetype, mustexist)
--- resolvers.find_file(filename, mustexist)
--- resolvers.find_file(filename, filetype)
+-- resolvers.findfile(filename)
+-- resolvers.findfile(filename, filetype, mustexist)
+-- resolvers.findfile(filename, mustexist)
+-- resolvers.findfile(filename, filetype)
-function resolvers.register_file(files, name, path)
+function resolvers.registerfile(files, name, path)
if files[name] then
if type(files[name]) == 'string' then
files[name] = { files[name], path }
@@ -1334,23 +1334,23 @@ function resolvers.register_file(files, name, path)
end
end
-function resolvers.do_with_path(name,func)
- local pathlist = resolvers.expanded_path_list(name)
+function resolvers.dowithpath(name,func)
+ local pathlist = resolvers.expandedpathlist(name)
for i=1,#pathlist do
- func("^"..resolvers.clean_path(pathlist[i]))
+ func("^"..resolvers.cleanpath(pathlist[i]))
end
end
-function resolvers.do_with_var(name,func)
- func(expanded_var(name))
+function resolvers.dowithvariable(name,func)
+ func(expandedvariable(name))
end
-function resolvers.locate_format(name)
+function resolvers.locateformat(name)
local barename = gsub(name,"%.%a+$","")
local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
if fmtname == "" then
- fmtname = resolvers.find_files(barename..".fmt")[1] or ""
- fmtname = resolvers.clean_path(fmtname)
+ fmtname = resolvers.findfiles(barename..".fmt")[1] or ""
+ fmtname = resolvers.cleanpath(fmtname)
end
if fmtname ~= "" then
local barename = file.removesuffix(fmtname)
@@ -1366,7 +1366,7 @@ function resolvers.locate_format(name)
return nil, nil
end
-function resolvers.boolean_variable(str,default)
+function resolvers.booleanvariable(str,default)
local b = resolvers.expansion(str)
if b == "" then
return default
@@ -1376,7 +1376,7 @@ function resolvers.boolean_variable(str,default)
end
end
-function resolvers.with_files(pattern,handle,before,after) -- can be a nice iterator instead
+function resolvers.dowithfilesintree(pattern,handle,before,after) -- can be a nice iterator instead
local instance = resolvers.instance
local hashes = instance.hashes
for i=1,#hashes do
@@ -1419,3 +1419,9 @@ function resolvers.with_files(pattern,handle,before,after) -- can be a nice iter
end
end
end
+
+resolvers.obsolete = resolvers.obsolete or { }
+local obsolete = resolvers.obsolete
+
+resolvers.find_file = resolvers.findfile obsolete.find_file = resolvers.findfile
+resolvers.find_files = resolvers.findfiles obsolete.find_files = resolvers.findfiles
diff --git a/tex/context/base/data-tex.lua b/tex/context/base/data-tex.lua
index d597b26a2..cf66913c1 100644
--- a/tex/context/base/data-tex.lua
+++ b/tex/context/base/data-tex.lua
@@ -22,7 +22,7 @@ local finders, openers, loaders = resolvers.finders, resolvers.openers, resolver
local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check
function finders.generic(tag,filename,filetype)
- local foundname = resolvers.find_file(filename,filetype)
+ local foundname = resolvers.findfile(filename,filetype)
if foundname and foundname ~= "" then
if trace_locating then
report_resolvers("%s finder: file '%s' found",tag,filename)
@@ -39,15 +39,17 @@ end
--~ local lpegmatch = lpeg.match
--~ local getlines = lpeg.Ct(lpeg.patterns.textline)
+resolvers.filters = resolvers.filters or { }
+
local input_translator, utf_translator, user_translator = nil, nil, nil
-function resolvers.install_text_filter(name,func)
+function resolvers.filters.install(name,func)
if name == "input" then input_translator = func
elseif name == "utf" then utf_translator = func
elseif name == "user" then user_translator = func end
end
-function openers.text_opener(filename,file_handle,tag)
+function openers.textopener(filename,file_handle,tag)
local u = unicode.utftype(file_handle)
local t = { }
if u > 0 then
@@ -161,7 +163,7 @@ function openers.generic(tag,filename)
if trace_locating then
report_resolvers("%s opener, file '%s' opened",tag,filename)
end
- return openers.text_opener(filename,f,tag)
+ return openers.textopener(filename,f,tag)
end
end
if trace_locating then
diff --git a/tex/context/base/data-tmp.lua b/tex/context/base/data-tmp.lua
index aeca105a0..e4bef66d8 100644
--- a/tex/context/base/data-tmp.lua
+++ b/tex/context/base/data-tmp.lua
@@ -28,7 +28,7 @@ local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-local report_cache = logs.new("cache")
+local report_cache = logs.new("cache")
local report_resolvers = logs.new("resolvers")
local resolvers = resolvers
@@ -51,12 +51,12 @@ local writable, readables, usedreadables = nil, { }, { }
local function identify()
-- Combining the loops makes it messy. First we check the format cache path
-- and when the last component is not present we try to create it.
- local texmfcaches = resolvers.clean_path_list("TEXMFCACHE")
+ local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE")
if texmfcaches then
for k=1,#texmfcaches do
local cachepath = texmfcaches[k]
if cachepath ~= "" then
- cachepath = resolvers.clean_path(cachepath)
+ cachepath = resolvers.cleanpath(cachepath)
cachepath = file.collapse_path(cachepath)
local valid = isdir(cachepath)
if valid then
@@ -90,7 +90,7 @@ local function identify()
local cachepath = texmfcaches[k]
cachepath = resolvers.getenv(cachepath)
if cachepath ~= "" then
- cachepath = resolvers.clean_path(cachepath)
+ cachepath = resolvers.cleanpath(cachepath)
local valid = isdir(cachepath)
if valid and file.is_readable(cachepath) then
if not writable and file.is_writable(cachepath) then
@@ -112,7 +112,7 @@ local function identify()
os.exit()
end
-- why here
- writable = dir.expandname(resolvers.clean_path(writable)) -- just in case
+ writable = dir.expandname(resolvers.cleanpath(writable)) -- just in case
-- moved here
local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree
if tree then
@@ -277,8 +277,8 @@ function caches.savedata(filepath,filename,data,raw)
else
table.tofile(tmaname, data,'return',false,true,false) -- maybe not the last true
end
- local cleanup = resolvers.boolean_variable("PURGECACHE", false)
- local strip = resolvers.boolean_variable("LUACSTRIP", true)
+ local cleanup = resolvers.booleanvariable("PURGECACHE", false)
+ local strip = resolvers.booleanvariable("LUACSTRIP", true)
utilities.lua.compile(tmaname, tmcname, cleanup, strip)
end
@@ -356,5 +356,3 @@ function caches.savecontent(cachename,dataname,content)
report_resolvers("unable to save '%s' in '%s' (access error)",dataname,luaname)
end
end
-
-
diff --git a/tex/context/base/data-tre.lua b/tex/context/base/data-tre.lua
index cfa8e3c5d..f119e52e7 100644
--- a/tex/context/base/data-tre.lua
+++ b/tex/context/base/data-tre.lua
@@ -52,7 +52,7 @@ function resolvers.locators.tree(specification)
if trace_locating then
report_resolvers("tree locator '%s' found (%s)",path,specification)
end
- resolvers.append_hash('tree',specification,path,false) -- don't cache
+ resolvers.appendhash('tree',specification,path,false) -- don't cache
elseif trace_locating then
report_resolvers("tree locator '%s' not found",path)
end
diff --git a/tex/context/base/data-use.lua b/tex/context/base/data-use.lua
index 75f2ebff2..d4e9b53fe 100644
--- a/tex/context/base/data-use.lua
+++ b/tex/context/base/data-use.lua
@@ -19,7 +19,7 @@ local resolvers = resolvers
resolvers.automounted = resolvers.automounted or { }
function resolvers.automount(usecache)
- local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT'))
+ local mountpaths = resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT'))
if (not mountpaths or #mountpaths == 0) and usecache then
mountpaths = caches.getreadablepaths("mount")
end
@@ -63,7 +63,7 @@ function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname ==
local luvdata = {
enginebanner = enginebanner,
formatbanner = formatbanner,
- sourcehash = md5.hex(io.loaddata(resolvers.find_file(sourcefile)) or "unknown"),
+ sourcehash = md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"),
sourcefile = sourcefile,
}
io.savedata(luvname,table.serialize(luvdata,true))
@@ -77,7 +77,7 @@ function statistics.checkfmtstatus(texname)
if lfs.isfile(luvname) then
local luv = dofile(luvname)
if luv and luv.sourcefile then
- local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
+ local sourcehash = md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown")
local luvbanner = luv.enginebanner or "?"
if luvbanner ~= enginebanner then
return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
diff --git a/tex/context/base/data-zip.lua b/tex/context/base/data-zip.lua
index 1b261c45e..784ecb6b7 100644
--- a/tex/context/base/data-zip.lua
+++ b/tex/context/base/data-zip.lua
@@ -47,7 +47,7 @@ function zip.openarchive(name)
else
local arch = archives[name]
if not arch then
- local full = resolvers.find_file(name) or ""
+ local full = resolvers.findfile(name) or ""
arch = (full ~= "" and zip.open(full)) or false
archives[name] = arch
end
@@ -141,7 +141,7 @@ function openers.zip(specification)
if trace_locating then
report_resolvers("zip opener, file '%s' found",q.name)
end
- return openers.text_opener(specification,dfile,'zip')
+ return openers.textopener(specification,dfile,'zip')
elseif trace_locating then
report_resolvers("zip opener, file '%s' not found",q.name)
end
@@ -205,10 +205,10 @@ function resolvers.usezipfile(zipname)
report_resolvers("zip registering, registering archive '%s'",zipname)
end
statistics.starttiming(instance)
- resolvers.prepend_hash('zip',zipname,zipfile)
- resolvers.extend_texmf_var(zipname) -- resets hashes too
+ resolvers.prependhash('zip',zipname,zipfile)
+ resolvers.extendtexmfvariable(zipname) -- resets hashes too
registeredfiles[zipname] = z
- instance.files[zipname] = resolvers.register_zip_file(z,tree or "")
+ instance.files[zipname] = resolvers.registerzipfile(z,tree or "")
statistics.stoptiming(instance)
elseif trace_locating then
report_resolvers("zip registering, unknown archive '%s'",zipname)
@@ -218,7 +218,7 @@ function resolvers.usezipfile(zipname)
end
end
-function resolvers.register_zip_file(z,tree)
+function resolvers.registerzipfile(z,tree)
local files, filter = { }, ""
if tree == "" then
filter = "^(.+)/(.-)$"
@@ -228,7 +228,7 @@ function resolvers.register_zip_file(z,tree)
if trace_locating then
report_resolvers("zip registering, using filter '%s'",filter)
end
- local register, n = resolvers.register_file, 0
+ local register, n = resolvers.registerfile, 0
for i in z:files() do
local path, name = match(i.filename,filter)
if path then
diff --git a/tex/context/base/font-afm.lua b/tex/context/base/font-afm.lua
index 8a9fabed1..f0c440e9c 100644
--- a/tex/context/base/font-afm.lua
+++ b/tex/context/base/font-afm.lua
@@ -34,10 +34,14 @@ fonts.afm = fonts.afm or { }
local afm = fonts.afm
local tfm = fonts.tfm
-afm.version = 1.402 -- incrementing this number one up will force a re-cache
-afm.syncspace = true -- when true, nicer stretch values
-afm.enhance_data = true -- best leave this set to true
-afm.cache = containers.define("fonts", "afm", afm.version, true)
+afm.version = 1.402 -- incrementing this number one up will force a re-cache
+afm.syncspace = true -- when true, nicer stretch values
+afm.addligatures = true -- best leave this set to true
+afm.addtexligatures = true -- best leave this set to true
+afm.addkerns = true -- best leave this set to true
+afm.cache = containers.define("fonts", "afm", afm.version, true)
+
+local definers = fonts.definers
local afmfeatures = {
aux = { },
@@ -260,11 +264,11 @@ by adding ligatures and kern information to the afm derived data. That
way we can set them faster when defining a font.</p>
--ldx]]--
-local add_kerns, add_ligatures, unify -- we will implement these later
+local addkerns, addligatures, unify -- we will implement these later
function afm.load(filename)
-- hm, for some reasons not resolved yet
- filename = resolvers.find_file(filename,'afm') or ""
+ filename = resolvers.findfile(filename,'afm') or ""
if filename ~= "" then
local name = file.removesuffix(file.basename(filename))
local data = containers.read(afm.cache,name)
@@ -272,9 +276,9 @@ function afm.load(filename)
local size, time = attr.size or 0, attr.modification or 0
--
local pfbfile = file.replacesuffix(name,"pfb")
- local pfbname = resolvers.find_file(pfbfile,"pfb") or ""
+ local pfbname = resolvers.findfile(pfbfile,"pfb") or ""
if pfbname == "" then
- pfbname = resolvers.find_file(file.basename(pfbfile),"pfb") or ""
+ pfbname = resolvers.findfile(file.basename(pfbfile),"pfb") or ""
end
local pfbsize, pfbtime = 0, 0
if pfbname ~= "" then
@@ -294,16 +298,20 @@ function afm.load(filename)
end
report_afm( "unifying %s",filename)
unify(data,filename)
- if afm.enhance_data then
+ if afm.addligatures then
report_afm( "add ligatures")
- add_ligatures(data,'ligatures') -- easier this way
+ addligatures(data,'ligatures') -- easier this way
+ end
+ if afm.addtexligatures then
report_afm( "add tex-ligatures")
- add_ligatures(data,'texligatures') -- easier this way
+ addligatures(data,'texligatures') -- easier this way
+ end
+ if afm.addkerns then
report_afm( "add extra kerns")
- add_kerns(data) -- faster this way
+ addkerns(data) -- faster this way
end
report_afm( "add tounicode data")
- fonts.map.add_to_unicode(data,filename)
+ fonts.map.addtounicode(data,filename)
data.size = size
data.time = time
data.pfbsize = pfbsize
@@ -323,7 +331,7 @@ end
unify = function(data, filename)
local unicodevector = fonts.enc.load('unicode').hash
local glyphs, indices, unicodes, names = { }, { }, { }, { }
- local verbose, private = fonts.verbose, fonts.private
+ local verbose, private = fonts.verbose, fonts.privateoffset
for name, blob in next, data.characters do
local code = unicodevector[name] -- or characters.name_to_unicode[name]
if not code then
@@ -370,7 +378,7 @@ end
and extra kerns. This saves quite some lookups later.</p>
--ldx]]--
-add_ligatures = function(afmdata,ligatures)
+addligatures = function(afmdata,ligatures)
local glyphs, luatex = afmdata.glyphs, afmdata.luatex
local indices, unicodes, names = luatex.indices, luatex.unicodes, luatex.names
for k,v in next, characters[ligatures] do -- main characters table
@@ -398,7 +406,7 @@ end
them selectively.</p>
--ldx]]--
-add_kerns = function(afmdata)
+addkerns = function(afmdata)
local glyphs = afmdata.glyphs
local names = afmdata.luatex.names
local uncomposed = characters.uncomposed
@@ -458,7 +466,7 @@ end
-- once we have otf sorted out (new format) we can try to make the afm
-- cache similar to it (similar tables)
-local function add_dimensions(data) -- we need to normalize afm to otf i.e. indexed table instead of name
+local function adddimensions(data) -- we need to normalize afm to otf i.e. indexed table instead of name
if data then
for index, glyph in next, data.glyphs do
local bb = glyph.boundingbox
@@ -496,7 +504,7 @@ local function copytotfm(data)
characters[u] = { }
descriptions[u] = d
end
- local filename = fonts.tfm.checked_filename(luatex) -- was metadata.filename
+ local filename = fonts.tfm.checkedfilename(luatex) -- was metadata.filename
local fontname = metadata.fontname or metadata.fullname
local fullname = metadata.fullname or metadata.fontname
local endash, emdash, spacer, spaceunits = unicodes['space'], unicodes['emdash'], "space", 500
@@ -671,10 +679,10 @@ local function setfeatures(tfmdata)
end
local function checkfeatures(specification)
- local features, done = fonts.define.check(specification.features.normal,afmfeatures.default)
+ local features, done = definers.check(specification.features.normal,afmfeatures.default)
if done then
specification.features.normal = features
- tfm.hash_instance(specification,true)
+ tfm.hashinstance(specification,true)
end
end
@@ -697,14 +705,14 @@ local function afmtotfm(specification)
return nil
else
checkfeatures(specification)
- specification = fonts.define.resolve(specification) -- new, was forgotten
+ specification = definers.resolve(specification) -- new, was forgotten
local features = specification.features.normal
local cache_id = specification.hash
local tfmdata = containers.read(tfm.cache, cache_id) -- cache with features applied
if not tfmdata then
local afmdata = afm.load(afmname)
if afmdata and next(afmdata) then
- add_dimensions(afmdata)
+ adddimensions(afmdata)
tfmdata = copytotfm(afmdata)
if tfmdata and next(tfmdata) then
local shared = tfmdata.shared
@@ -731,23 +739,23 @@ those cases, but now that we can handle <l n='opentype'/> directly we no longer
need this features.</p>
--ldx]]--
-tfm.default_encoding = 'unicode'
-
-function tfm.set_normal_feature(specification,name,value)
- if specification and name then
- local features = specification.features
- if not features then
- features = { }
- specification.features = features
- end
- local normalfeatures = features.normal
- if normalfeatures then
- normalfeatures[name] = value
- else
- features.normal = { [name] = value }
- end
- end
-end
+-- tfm.default_encoding = 'unicode'
+--
+-- function tfm.setnormalfeature(specification,name,value)
+-- if specification and name then
+-- local features = specification.features
+-- if not features then
+-- features = { }
+-- specification.features = features
+-- end
+-- local normalfeatures = features.normal
+-- if normalfeatures then
+-- normalfeatures[name] = value
+-- else
+-- features.normal = { [name] = value }
+-- end
+-- end
+-- end
function tfm.read_from_afm(specification)
local tfmtable = afmtotfm(specification)
diff --git a/tex/context/base/font-agl.lua b/tex/context/base/font-agl.lua
index 3eac792d7..729863618 100644
--- a/tex/context/base/font-agl.lua
+++ b/tex/context/base/font-agl.lua
@@ -6,10 +6,13 @@ if not modules then modules = { } end modules ['font-map'] = {
comment = "Adobe Glyph List, version 2.0, September 20, 2002",
}
-local fonts = fonts
-fonts.map = fonts.map or { }
+local allocate = utilities.storage.allocate
-fonts.map.unicode_to_agl = {
+local enc = fonts.enc
+local agl = { }
+enc.agl = agl
+
+agl.names = allocate { -- to name
"controlSTX",
"controlSOT",
"controlETX",
@@ -3692,4 +3695,4 @@ fonts.map.unicode_to_agl = {
[0xFFE6] = "wonmonospace",
}
-fonts.map.agl_to_unicode = table.swapped(fonts.map.unicode_to_agl)
+agl.unicodes = allocate(table.swapped(agl.names)) -- to unicode
diff --git a/tex/context/base/font-chk.lua b/tex/context/base/font-chk.lua
index 4e6f2cd9e..1fa520452 100644
--- a/tex/context/base/font-chk.lua
+++ b/tex/context/base/font-chk.lua
@@ -30,7 +30,7 @@ local remove_node = nodes.remove
checkers.enabled = false
checkers.delete = false
-function fonts.register_message(font,char,message)
+local function registermessage(font,char,message)
local tfmdata = fontdata[font]
local shared = tfmdata.shared
local messages = shared.messages
@@ -49,6 +49,8 @@ function fonts.register_message(font,char,message)
end
end
+fonts.registermessage = registermessage
+
function checkers.missing(head)
if checkers.enabled then
local lastfont, characters, found = nil, nil, nil
@@ -59,9 +61,9 @@ function checkers.missing(head)
end
if not characters[char] and is_character[chardata[char].category] then
if checkers.delete then
- fonts.register_message(font,char,"missing (will be deleted)")
+ registermessage(font,char,"missing (will be deleted)")
else
- fonts.register_message(font,char,"missing")
+ registermessage(font,char,"missing")
end
if not found then
found = { n }
diff --git a/tex/context/base/font-cid.lua b/tex/context/base/font-cid.lua
index 55b84e4ee..1d03bca4d 100644
--- a/tex/context/base/font-cid.lua
+++ b/tex/context/base/font-cid.lua
@@ -92,7 +92,7 @@ local function locate(registry,ordering,supplement)
if trace_loading then
report_otf("checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
end
- local fullname = resolvers.find_file(filename,'cid') or ""
+ local fullname = resolvers.findfile(filename,'cid') or ""
if fullname ~= "" then
cidmap = cid.load(fullname)
if cidmap then
diff --git a/tex/context/base/font-clr.lua b/tex/context/base/font-clr.lua
index ef98c2f06..3733aaf68 100644
--- a/tex/context/base/font-clr.lua
+++ b/tex/context/base/font-clr.lua
@@ -8,7 +8,8 @@ if not modules then modules = { } end modules ['font-clr'] = {
-- moved from ini:
-fonts.color = fonts.color or { } -- dummy in ini
+fonts.colors = fonts.colors or { } -- dummy in ini
+local colors = fonts.colors
local set_attribute = node.set_attribute
local unset_attribute = node.unset_attribute
@@ -16,7 +17,7 @@ local unset_attribute = node.unset_attribute
local attribute = attributes.private('color')
local mapping = attributes and attributes.list[attribute] or { }
-function fonts.color.set(n,c)
+function colors.set(n,c)
local mc = mapping[c]
if not mc then
unset_attribute(n,attribute)
@@ -25,6 +26,6 @@ function fonts.color.set(n,c)
end
end
-function fonts.color.reset(n)
+function colors.reset(n)
unset_attribute(n,attribute)
end
diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua
index f95d78668..951390f5e 100644
--- a/tex/context/base/font-ctx.lua
+++ b/tex/context/base/font-ctx.lua
@@ -8,8 +8,9 @@ if not modules then modules = { } end modules ['font-ctx'] = {
-- needs a cleanup: merge of replace, lang/script etc
-local texsprint, count, texsetcount = tex.sprint, tex.count, tex.setcount
-local format, concat, gmatch, match, find, lower, gsub, byte = string.format, table.concat, string.gmatch, string.match, string.find, string.lower, string.gsub, string.byte
+local texsprint, count, texsetcount, write_nl = tex.sprint, tex.count, tex.setcount, texio.write_nl
+local format, gmatch, match, find, lower, gsub, byte = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub, string.byte
+local concat, serialize = table.concat, table.serialize
local settings_to_hash, hash_to_string = utilities.parsers.settings_to_hash, utilities.parsers.hash_to_string
local formatcolumns = utilities.formatters.formatcolumns
@@ -27,21 +28,21 @@ local report_define = logs.new("define fonts")
local report_usage = logs.new("fonts usage")
local report_mapfiles = logs.new("mapfiles")
-local fonts = fonts
-local tfm = fonts.tfm
-local define = fonts.define
-local fontdata = fonts.identifiers
-local specify = define.specify
-
-specify.context_setups = specify.context_setups or { }
-specify.context_numbers = specify.context_numbers or { }
-specify.context_merged = specify.context_merged or { }
-specify.synonyms = specify.synonyms or { }
-
-local setups = specify.context_setups
-local numbers = specify.context_numbers
-local merged = specify.context_merged
-local synonyms = specify.synonyms
+local fonts = fonts
+local tfm = fonts.tfm
+local fontdata = fonts.identifiers
+local definers = fonts.definers
+local specifiers = definers.specifiers
+
+specifiers.contextsetups = specifiers.contextsetups or { }
+specifiers.contextnumbers = specifiers.contextnumbers or { }
+specifiers.contextmerged = specifiers.contextmerged or { }
+specifiers.synonyms = specifiers.synonyms or { }
+
+local setups = specifiers.contextsetups
+local numbers = specifiers.contextnumbers
+local merged = specifiers.contextmerged
+local synonyms = specifiers.synonyms
local triggers = fonts.triggers
-- Beware, number can be shared between redefind features but as it is
@@ -60,28 +61,25 @@ name*context specification
</code>
--ldx]]--
-function specify.predefined(specification)
+local function predefined(specification)
local detail = specification.detail
- if detail ~= "" then
- -- detail = gsub(detail,"["..define.splitsymbols.."].*$","") -- get rid of *whatever specs and such
- if define.methods[detail] then -- since these may be appended at the
- specification.features.vtf = { preset = detail } -- tex end by default
- end
+ if detail ~= "" and definers.methods.variants[detail] then
+ specification.features.vtf = { preset = detail }
end
return specification
end
-define.register_split("@", specify.predefined)
+definers.registersplit("@", predefined)
-storage.register("fonts/setups" , define.specify.context_setups , "fonts.define.specify.context_setups" )
-storage.register("fonts/numbers", define.specify.context_numbers, "fonts.define.specify.context_numbers")
-storage.register("fonts/merged", define.specify.context_merged, "fonts.define.specify.context_merged")
-storage.register("fonts/synonyms", define.specify.synonyms, "fonts.define.specify.synonyms")
+storage.register("fonts/setups" , setups , "fonts.definers.specifiers.contextsetups" )
+storage.register("fonts/numbers", numbers, "fonts.definers.specifiers.contextnumbers")
+storage.register("fonts/merged", merged, "fonts.definers.specifiers.contextmerged")
+storage.register("fonts/synonyms", synonyms, "fonts.definers.specifiers.synonyms")
local normalize_meanings = fonts.otf.meanings.normalize
local default_features = fonts.otf.features.default
-local function preset_context(name,parent,features) -- currently otf only
+local function presetcontext(name,parent,features) -- currently otf only
if features == "" and find(parent,"=") then
features = parent
parent = ""
@@ -133,7 +131,7 @@ local function preset_context(name,parent,features) -- currently otf only
return number, t
end
-local function context_number(name) -- will be replaced
+local function contextnumber(name) -- will be replaced
local t = setups[name]
if not t then
return 0
@@ -164,7 +162,7 @@ local function context_number(name) -- will be replaced
end
end
-local function merge_context(currentnumber,extraname,option)
+local function mergecontext(currentnumber,extraname,option)
local current = setups[numbers[currentnumber]]
local extra = setups[extraname]
if extra then
@@ -194,13 +192,13 @@ local function merge_context(currentnumber,extraname,option)
numbers[number] = mergedname
merged[number] = option
setups[mergedname] = mergedfeatures
- return number -- context_number(mergedname)
+ return number -- contextnumber(mergedname)
else
return currentnumber
end
end
-local function register_context(fontnumber,extraname,option)
+local function registercontext(fontnumber,extraname,option)
local extra = setups[extraname]
if extra then
local mergedfeatures, mergedname = { }, nil
@@ -217,16 +215,16 @@ local function register_context(fontnumber,extraname,option)
numbers[number] = mergedname
merged[number] = option
setups[mergedname] = mergedfeatures
- return number -- context_number(mergedname)
+ return number -- contextnumber(mergedname)
else
return 0
end
end
-specify.preset_context = preset_context
-specify.context_number = context_number
-specify.merge_context = merge_context
-specify.register_context = register_context
+specifiers.presetcontext = presetcontext
+specifiers.contextnumber = contextnumber
+specifiers.mergecontext = mergecontext
+specifiers.registercontext = registercontext
local current_font = font.current
local tex_attribute = tex.attribute
@@ -238,7 +236,7 @@ function fonts.withset(name,what)
local hash = zero .. "+" .. name .. "*" .. what
local done = cache[hash]
if not done then
- done = merge_context(zero,name,what)
+ done = mergecontext(zero,name,what)
cache[hash] = done
end
tex_attribute[0] = done
@@ -249,25 +247,25 @@ function fonts.withfnt(name,what)
local hash = font .. "*" .. name .. "*" .. what
local done = cache[hash]
if not done then
- done = register_context(font,name,what)
+ done = registercontext(font,name,what)
cache[hash] = done
end
tex_attribute[0] = done
end
-function specify.show_context(name)
+function specifiers.showcontext(name)
return setups[name] or setups[numbers[name]] or setups[numbers[tonumber(name)]] or { }
end
-- todo: support a,b,c
-local function split_context(features) -- preset_context creates dummy here
- return setups[features] or (preset_context(features,"","") and setups[features])
+local function splitcontext(features) -- presetcontext creates dummy here
+ return setups[features] or (presetcontext(features,"","") and setups[features])
end
--~ local splitter = lpeg.splitat("=")
---~ local function split_context(features)
+--~ local function splitcontext(features)
--~ local setup = setups[features]
--~ if setup then
--~ return setup
@@ -299,30 +297,30 @@ end
--~ end
--~ end
--~ end
---~ setup = merge and preset_context(features,"",merge) and setups[features]
+--~ setup = merge and presetcontext(features,"",merge) and setups[features]
--~ -- actually we have to nil setups[features] in order to permit redefinitions
--~ setups[features] = nil
--~ end
---~ return setup or (preset_context(features,"","") and setups[features]) -- creates dummy
+--~ return setup or (presetcontext(features,"","") and setups[features]) -- creates dummy
--~ end
-specify.split_context = split_context
+specifiers.splitcontext = splitcontext
-function specify.context_tostring(name,kind,separator,yes,no,strict,omit) -- not used
+function specifiers.contexttostring(name,kind,separator,yes,no,strict,omit) -- not used
return hash_to_string(table.merged(fonts[kind].features.default or {},setups[name] or {}),separator,yes,no,strict,omit)
end
-function specify.starred(features) -- no longer fallbacks here
+local function starred(features) -- no longer fallbacks here
local detail = features.detail
if detail and detail ~= "" then
- features.features.normal = split_context(detail)
+ features.features.normal = splitcontext(detail)
else
features.features.normal = { }
end
return features
end
-define.register_split('*',specify.starred)
+definers.registersplit('*',starred)
-- define (two steps)
@@ -346,15 +344,15 @@ local splitpattern = spaces * value * spaces * rest
local specification --
-local get_specification = define.get_specification
+local getspecification = definers.getspecification
-- we can make helper macros which saves parsing (but normaly not
-- that many calls, e.g. in mk a couple of 100 and in metafun 3500)
-function define.command_1(str)
+function definers.stage_one(str)
statistics.starttiming(fonts)
local fullname, size = lpegmatch(splitpattern,str)
- local lookup, name, sub, method, detail = get_specification(fullname)
+ local lookup, name, sub, method, detail = getspecification(fullname)
if not name then
report_define("strange definition '%s'",str)
texsprint(ctxcatcodes,"\\fcglet\\somefontname\\defaultfontfile")
@@ -381,20 +379,20 @@ function define.command_1(str)
count.scaledfontmode = 0
texsprint(ctxcatcodes,"\\let\\somefontsize\\empty")
end
- specification = define.makespecification(str,lookup,name,sub,method,detail,size)
+ specification = definers.makespecification(str,lookup,name,sub,method,detail,size)
end
local n = 0
-- we can also move rscale to here (more consistent)
-function define.command_2(global,cs,str,size,classfeatures,fontfeatures,classfallbacks,fontfallbacks,
+function definers.stage_two(global,cs,str,size,classfeatures,fontfeatures,classfallbacks,fontfallbacks,
mathsize,textsize,relativeid,classgoodies,goodies)
if trace_defining then
report_define("memory usage before: %s",statistics.memused())
end
-- name is now resolved and size is scaled cf sa/mo
- local lookup, name, sub, method, detail = get_specification(str or "")
+ local lookup, name, sub, method, detail = getspecification(str or "")
-- asome settings can be overloaded
if lookup and lookup ~= "" then
specification.lookup = lookup
@@ -423,7 +421,7 @@ function define.command_2(global,cs,str,size,classfeatures,fontfeatures,classfal
elseif classfallbacks and classfallbacks ~= "" then
specification.fallbacks = classfallbacks
end
- local tfmdata = define.read(specification,size) -- id not yet known
+ local tfmdata = definers.read(specification,size) -- id not yet known
if not tfmdata then
report_define("unable to define %s as \\%s",name,cs)
texsetcount("global","lastfontid",-1)
@@ -441,9 +439,9 @@ function define.command_2(global,cs,str,size,classfeatures,fontfeatures,classfal
local id = font.define(tfmdata)
-- print(name,os.clock()-t)
tfmdata.id = id
- define.register(tfmdata,id)
+ definers.register(tfmdata,id)
tex.definefont(global,cs,id)
- tfm.cleanup_table(tfmdata)
+ tfm.cleanuptable(tfmdata)
if trace_defining then
report_define("defining %s with id %s as \\%s (features: %s/%s, fallbacks: %s/%s)",name,id,cs,classfeatures,fontfeatures,classfallbacks,fontfallbacks)
end
@@ -466,14 +464,14 @@ experiments.register("fonts.autorscale", function(v)
enable_auto_r_scale = v
end)
-local calculate_scale = fonts.tfm.calculate_scale
+local calculatescale = fonts.tfm.calculatescale
-- Not ok, we can best use a database for this. The problem is that we
-- have delayed definitions and so we never know what style is taken
-- as start.
-function fonts.tfm.calculate_scale(tfmtable, scaledpoints, relativeid)
- local scaledpoints, delta, units = calculate_scale(tfmtable,scaledpoints)
+function fonts.tfm.calculatescale(tfmtable, scaledpoints, relativeid)
+ local scaledpoints, delta, units = calculatescale(tfmtable,scaledpoints)
--~ if enable_auto_r_scale and relativeid then -- for the moment this is rather context specific
--~ local relativedata = fontdata[relativeid]
--~ local rfmtable = relativedata and relativedata.unscaled and relativedata.unscaled
@@ -531,7 +529,7 @@ end
-- for the moment here, this will become a chain of extras that is
-- hooked into the ctx registration (or scaler or ...)
-function fonts.set_digit_width(font) -- max(quad/2,wd(0..9))
+local function digitwidth(font) -- max(quad/2,wd(0..9))
local tfmtable = fontdata[font]
local parameters = tfmtable.parameters
local width = parameters.digitwidth
@@ -549,7 +547,8 @@ function fonts.set_digit_width(font) -- max(quad/2,wd(0..9))
return width
end
-fonts.get_digit_width = fonts.set_digit_width
+fonts.getdigitwidth = digitwidth
+fonts.setdigitwidth = digitwidth
-- soon to be obsolete:
@@ -669,7 +668,7 @@ function fonts.showfontparameters()
end
end
-function fonts.report_defined_fonts()
+function fonts.reportdefinedfonts()
if trace_usage then
local t = { }
for id, data in table.sortedhash(fonts.ids) do
@@ -699,9 +698,9 @@ function fonts.report_defined_fonts()
end
end
-luatex.registerstopactions(fonts.report_defined_fonts)
+luatex.registerstopactions(fonts.reportdefinedfonts)
-function fonts.report_used_features()
+function fonts.reportusedfeatures()
-- numbers, setups, merged
if trace_usage then
local t = { }
@@ -722,4 +721,4 @@ function fonts.report_used_features()
end
end
end
-luatex.registerstopactions(fonts.report_used_features)
+luatex.registerstopactions(fonts.reportusedfeatures)
diff --git a/tex/context/base/font-def.lua b/tex/context/base/font-def.lua
index a466ca655..3f585b908 100644
--- a/tex/context/base/font-def.lua
+++ b/tex/context/base/font-def.lua
@@ -10,6 +10,8 @@ local format, concat, gmatch, match, find, lower = string.format, table.concat,
local tostring, next = tostring, next
local lpegmatch = lpeg.match
+local allocate = utilities.storage.allocate
+
local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end)
local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end)
@@ -24,33 +26,38 @@ local report_afm = logs.new("load afm")
default loader that only handles <l n='tfm'/>.</p>
--ldx]]--
-local fonts = fonts
-local tfm = fonts.tfm
-local vf = fonts.vf
-local fontids = fonts.ids
+local fonts = fonts
+local tfm = fonts.tfm
+local vf = fonts.vf
+local fontids = fonts.ids
+
+fonts.used = allocate()
-fonts.used = fonts.used or { }
+tfm.readers = tfm.readers or { }
+tfm.fonts = allocate()
+tfm.internalized = allocate() -- internal tex numbers
-tfm.readers = tfm.readers or { }
-tfm.fonts = tfm.fonts or { }
-tfm.internalized = tfm.internalized or { } -- internal tex numbers
+local readers = tfm.readers
+local sequence = allocate { 'otf', 'ttf', 'afm', 'tfm' }
+readers.sequence = sequence
-local readers = tfm.readers
-local sequence = { 'otf', 'ttf', 'afm', 'tfm' }
-readers.sequence = sequence
+tfm.version = 1.01
+tfm.cache = containers.define("fonts", "tfm", tfm.version, false) -- better in font-tfm
+tfm.autoprefixedafm = true -- this will become false some day (catches texnansi-blabla.*)
-tfm.version = 1.01
-tfm.cache = containers.define("fonts", "tfm", tfm.version, false) -- better in font-tfm
-tfm.auto_afm = true
+fonts.definers = fonts.definers or { }
+local definers = fonts.definers
-fonts.define = fonts.define or { }
-local define = fonts.define
+definers.specifiers = definers.specifiers or { }
+local specifiers = definers.specifiers
-define.method = "afm or tfm" -- afm, tfm, afm or tfm, tfm or afm
-define.specify = define.specify or { }
-define.methods = define.methods or { }
+specifiers.variants = allocate()
+local variants = specifiers.variants
-local findbinfile = resolvers.findbinfile
+definers.method = "afm or tfm" -- afm, tfm, afm or tfm, tfm or afm
+definers.methods = definers.methods or { }
+
+local findbinfile = resolvers.findbinfile
--[[ldx--
<p>We hardly gain anything when we cache the final (pre scaled)
@@ -79,7 +86,7 @@ and prepares a table that will move along as we proceed.</p>
-- name name(sub) name(sub)*spec name*spec
-- name@spec*oeps
-local splitter, specifiers = nil, ""
+local splitter, splitspecifiers = nil, ""
local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc
@@ -88,13 +95,13 @@ local right = P(")")
local colon = P(":")
local space = P(" ")
-define.defaultlookup = "file"
+definers.defaultlookup = "file"
local prefixpattern = P(false)
-function define.add_specifier(symbol)
- specifiers = specifiers .. symbol
- local method = S(specifiers)
+local function addspecifier(symbol)
+ splitspecifiers = splitspecifiers .. symbol
+ local method = S(splitspecifiers)
local lookup = C(prefixpattern) * colon
local sub = left * C(P(1-left-right-method)^1) * right
local specification = C(method) * C(P(1)^1)
@@ -102,24 +109,28 @@ function define.add_specifier(symbol)
splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc("")))
end
-function define.add_lookup(str,default)
+local function addlookup(str,default)
prefixpattern = prefixpattern + P(str)
end
-define.add_lookup("file")
-define.add_lookup("name")
-define.add_lookup("spec")
+definers.addlookup = addlookup
+
+addlookup("file")
+addlookup("name")
+addlookup("spec")
-function define.get_specification(str)
+local function getspecification(str)
return lpegmatch(splitter,str)
end
-function define.register_split(symbol,action)
- define.add_specifier(symbol)
- define.specify[symbol] = action
+definers.getspecification = getspecification
+
+function definers.registersplit(symbol,action)
+ addspecifier(symbol)
+ variants[symbol] = action
end
-function define.makespecification(specification, lookup, name, sub, method, detail, size)
+function definers.makespecification(specification, lookup, name, sub, method, detail, size)
size = size or 655360
if trace_defining then
report_define("%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
@@ -127,7 +138,7 @@ function define.makespecification(specification, lookup, name, sub, method, deta
(sub ~= "" and sub) or "-", (method ~= "" and method) or "-", (detail ~= "" and detail) or "-")
end
if not lookup or lookup == "" then
- lookup = define.defaultlookup
+ lookup = definers.defaultlookup
end
local t = {
lookup = lookup, -- forced type
@@ -144,10 +155,10 @@ function define.makespecification(specification, lookup, name, sub, method, deta
return t
end
-function define.analyze(specification, size)
+function definers.analyze(specification, size)
-- can be optimized with locals
- local lookup, name, sub, method, detail = define.get_specification(specification or "")
- return define.makespecification(specification, lookup, name, sub, method, detail, size)
+ local lookup, name, sub, method, detail = getspecification(specification or "")
+ return definers.makespecification(specification, lookup, name, sub, method, detail, size)
end
--[[ldx--
@@ -156,7 +167,7 @@ end
local sortedhashkeys = table.sortedhashkeys
-function tfm.hash_features(specification)
+function tfm.hashfeatures(specification)
local features = specification.features
if features then
local t = { }
@@ -188,7 +199,7 @@ function tfm.hash_features(specification)
return "unknown"
end
-fonts.designsizes = { }
+fonts.designsizes = allocate()
--[[ldx--
<p>In principle we can share tfm tables when we are in node for a font, but then
@@ -198,10 +209,10 @@ when we get rid of base mode we can optimize even further by sharing, but then w
loose our testcases for <l n='luatex'/>.</p>
--ldx]]--
-function tfm.hash_instance(specification,force)
+function tfm.hashinstance(specification,force)
local hash, size, fallbacks = specification.hash, specification.size, specification.fallbacks
if force or not hash then
- hash = tfm.hash_features(specification)
+ hash = tfm.hashfeatures(specification)
specification.hash = hash
end
if size < 1000 and fonts.designsizes[hash] then
@@ -229,8 +240,8 @@ end
<p>We can resolve the filename using the next function:</p>
--ldx]]--
-define.resolvers = define.resolvers or { }
-local resolvers = define.resolvers
+definers.resolvers = definers.resolvers or { }
+local resolvers = definers.resolvers
-- todo: reporter
@@ -274,7 +285,7 @@ function resolvers.spec(specification)
end
end
-function define.resolve(specification)
+function definers.resolve(specification)
if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
local r = resolvers[specification.lookup]
if r then
@@ -295,7 +306,7 @@ function define.resolve(specification)
end
end
--
- specification.hash = lower(specification.name .. ' @ ' .. tfm.hash_features(specification))
+ specification.hash = lower(specification.name .. ' @ ' .. tfm.hashfeatures(specification))
if specification.sub and specification.sub ~= "" then
specification.hash = specification.sub .. ' @ ' .. specification.hash
end
@@ -319,7 +330,7 @@ specification yet.</p>
--ldx]]--
function tfm.read(specification)
- local hash = tfm.hash_instance(specification)
+ local hash = tfm.hashinstance(specification)
local tfmtable = tfm.fonts[hash] -- hashes by size !
if not tfmtable then
local forced = specification.forced or ""
@@ -367,22 +378,22 @@ end
<p>For virtual fonts we need a slightly different approach:</p>
--ldx]]--
-function tfm.read_and_define(name,size) -- no id
- local specification = define.analyze(name,size)
+function tfm.readanddefine(name,size) -- no id
+ local specification = definers.analyze(name,size)
local method = specification.method
- if method and define.specify[method] then
- specification = define.specify[method](specification)
+ if method and variants[method] then
+ specification = variants[method](specification)
end
- specification = define.resolve(specification)
- local hash = tfm.hash_instance(specification)
- local id = define.registered(hash)
+ specification = definers.resolve(specification)
+ local hash = tfm.hashinstance(specification)
+ local id = definers.registered(hash)
if not id then
local fontdata = tfm.read(specification)
if fontdata then
fontdata.hash = hash
id = font.define(fontdata)
- define.register(fontdata,id)
- tfm.cleanup_table(fontdata)
+ definers.register(fontdata,id)
+ tfm.cleanuptable(fontdata)
else
id = 0 -- signal
end
@@ -402,6 +413,9 @@ local function check_tfm(specification,fullname)
if foundname == "" then
foundname = findbinfile(fullname, 'ofm') or "" -- bonus for usage outside context
end
+ if foundname == "" then
+ foundname = fonts.names.getfilename(fullname,"tfm")
+ end
if foundname ~= "" then
specification.filename, specification.format = foundname, "ofm"
return tfm.read_from_tfm(specification)
@@ -410,13 +424,15 @@ end
local function check_afm(specification,fullname)
local foundname = findbinfile(fullname, 'afm') or "" -- just to be sure
- if foundname == "" and tfm.auto_afm then
+ if foundname == "" then
+ foundname = fonts.names.getfilename(fullname,"afm")
+ end
+ if foundname == "" and tfm.autoprefixedafm then
local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.*
if encoding and shortname and fonts.enc.known[encoding] then
shortname = findbinfile(shortname,'afm') or "" -- just to be sure
if shortname ~= "" then
foundname = shortname
- -- tfm.set_normal_feature(specification,'encoding',encoding) -- will go away
if trace_loading then
report_afm("stripping encoding prefix from filename %s",afmname)
end
@@ -453,7 +469,7 @@ function readers.afm(specification,method)
tfmtable = check_afm(specification,specification.name .. "." .. forced)
end
if not tfmtable then
- method = method or define.method or "afm or tfm"
+ method = method or definers.method or "afm or tfm"
if method == "tfm" then
tfmtable = check_tfm(specification,specification.name)
elseif method == "afm" then
@@ -478,21 +494,26 @@ local function check_otf(forced,specification,suffix,what)
name = file.addsuffix(name,suffix,true)
end
local fullname, tfmtable = findbinfile(name,suffix) or "", nil -- one shot
+ -- if false then -- can be enabled again when needed
+ -- if fullname == "" then
+ -- local fb = fonts.names.old_to_new[name]
+ -- if fb then
+ -- fullname = findbinfile(fb,suffix) or ""
+ -- end
+ -- end
+ -- if fullname == "" then
+ -- local fb = fonts.names.new_to_old[name]
+ -- if fb then
+ -- fullname = findbinfile(fb,suffix) or ""
+ -- end
+ -- end
+ -- end
if fullname == "" then
- local fb = fonts.names.old_to_new[name]
- if fb then
- fullname = findbinfile(fb,suffix) or ""
- end
- end
- if fullname == "" then
- local fb = fonts.names.new_to_old[name]
- if fb then
- fullname = findbinfile(fb,suffix) or ""
- end
+ fullname = fonts.names.getfilename(name,suffix)
end
if fullname ~= "" then
specification.filename, specification.format = fullname, what -- hm, so we do set the filename, then
- tfmtable = tfm.read_from_open_type(specification) -- we need to do it for all matches / todo
+ tfmtable = tfm.read_from_otf(specification) -- we need to do it for all matches / todo
end
return tfmtable
end
@@ -518,7 +539,7 @@ function readers.dfont(specification) return readers.opentype(specification,"ttf
a helper function.</p>
--ldx]]--
-function define.check(features,defaults) -- nb adapts features !
+function definers.check(features,defaults) -- nb adapts features !
local done = false
if features and next(features) then
for k,v in next, defaults do
@@ -533,7 +554,7 @@ function define.check(features,defaults) -- nb adapts features !
end
--[[ldx--
-<p>So far the specifyers. Now comes the real definer. Here we cache
+<p>So far the specifiers. Now comes the real definer. Here we cache
based on id's. Here we also intercept the virtual font handler. Since
it evolved stepwise I may rewrite this bit (combine code).</p>
@@ -544,9 +565,13 @@ not gain much. By the way, passing id's back to in the callback was
introduced later in the development.</p>
--ldx]]--
-define.last = nil
+local lastdefined = nil -- we don't want this one to end up in s-tra-02
+
+function definers.current() -- or maybe current
+ return lastdefined
+end
-function define.register(fontdata,id)
+function definers.register(fontdata,id)
if fontdata and id then
local hash = fontdata.hash
if not tfm.internalized[hash] then
@@ -562,7 +587,7 @@ function define.register(fontdata,id)
end
end
-function define.registered(hash)
+function definers.registered(hash)
local id = tfm.internalized[hash]
return id, id and fonts.ids[id]
end
@@ -577,7 +602,7 @@ function tfm.make(specification)
-- however, when virtual tricks are used as feature (makes more
-- sense) we scale the commands in fonts.tfm.scale (and set the
-- factor there)
- local fvm = define.methods[specification.features.vtf.preset]
+ local fvm = definers.methods.variants[specification.features.vtf.preset]
if fvm then
return fvm(specification)
else
@@ -585,28 +610,28 @@ function tfm.make(specification)
end
end
-function define.read(specification,size,id) -- id can be optional, name can already be table
+function definers.read(specification,size,id) -- id can be optional, name can already be table
statistics.starttiming(fonts)
if type(specification) == "string" then
- specification = define.analyze(specification,size)
+ specification = definers.analyze(specification,size)
end
local method = specification.method
- if method and define.specify[method] then
- specification = define.specify[method](specification)
+ if method and variants[method] then
+ specification = variants[method](specification)
end
- specification = define.resolve(specification)
- local hash = tfm.hash_instance(specification)
+ specification = definers.resolve(specification)
+ local hash = tfm.hashinstance(specification)
if cache_them then
local fontdata = containers.read(fonts.cache,hash) -- for tracing purposes
end
- local fontdata = define.registered(hash) -- id
+ local fontdata = definers.registered(hash) -- id
if not fontdata then
if specification.features.vtf and specification.features.vtf.preset then
fontdata = tfm.make(specification)
else
fontdata = tfm.read(specification)
if fontdata then
- tfm.check_virtual_id(fontdata)
+ tfm.checkvirtualid(fontdata)
end
end
if cache_them then
@@ -616,11 +641,11 @@ function define.read(specification,size,id) -- id can be optional, name can alre
fontdata.hash = hash
fontdata.cache = "no"
if id then
- define.register(fontdata,id)
+ definers.register(fontdata,id)
end
end
end
- define.last = fontdata or id -- todo ! ! ! ! !
+ lastdefined = fontdata or id -- todo ! ! ! ! !
if not fontdata then
report_define( "unknown font %s, loading aborted",specification.name)
elseif trace_defining and type(fontdata) == "table" then
@@ -640,7 +665,7 @@ end
function vf.find(name)
name = file.removesuffix(file.basename(name))
- if tfm.resolve_vf then
+ if tfm.resolvevirtualtoo then
local format = fonts.logger.format(name)
if format == 'tfm' or format == 'ofm' then
if trace_defining then
@@ -665,5 +690,5 @@ end
<p>We overload both the <l n='tfm'/> and <l n='vf'/> readers.</p>
--ldx]]--
-callbacks.register('define_font' , define.read, "definition of fonts (tfmtable preparation)")
+callbacks.register('define_font' , definers.read, "definition of fonts (tfmtable preparation)")
callbacks.register('find_vf_file', vf.find , "locating virtual fonts, insofar needed") -- not that relevant any more
diff --git a/tex/context/base/font-dum.lua b/tex/context/base/font-dum.lua
index bac4f51af..00663daf4 100644
--- a/tex/context/base/font-dum.lua
+++ b/tex/context/base/font-dum.lua
@@ -10,9 +10,9 @@ fonts = fonts or { }
-- general
-fonts.otf.pack = false -- only makes sense in context
-fonts.tfm.resolve_vf = false -- no sure about this
-fonts.tfm.fontname_mode = "specification" -- somehow latex needs this
+fonts.otf.pack = false -- only makes sense in context
+fonts.tfm.resolvevirtualtoo = false -- context specific (du eto resolver)
+fonts.tfm.fontnamemode = "specification" -- somehow latex needs this (changed name!)
-- readers
@@ -22,13 +22,12 @@ fonts.tfm.readers.afm = nil
-- define
-fonts.define = fonts.define or { }
+fonts.definers = fonts.definers or { }
+fonts.definers.specifiers = fonts.definers.specifiers or { }
---~ fonts.define.method = "tfm"
+fonts.definers.specifiers.specifiers.colonizedpreference = "name" -- is "file" in context
-fonts.define.specify.colonized_default_lookup = "name"
-
-function fonts.define.get_specification(str)
+function fonts.definers.getspecification(str)
return "", str, "", ":", str
end
@@ -63,7 +62,7 @@ function fonts.names.resolve(name,sub)
if basename and basename ~= "" then
for i=1,#fileformats do
local format = fileformats[i]
- local foundname = resolvers.find_file(basename,format) or ""
+ local foundname = resolvers.findfile(basename,format) or ""
if foundname ~= "" then
data = dofile(foundname)
break
@@ -210,7 +209,7 @@ fonts.initializers.node.otf.expansion = fonts.initializers.common.expansion
-- left over
-function fonts.register_message()
+function fonts.registermessage()
end
-- example vectors
@@ -261,6 +260,12 @@ fonts.otf.meanings.normalize = fonts.otf.meanings.normalize or function(t)
end
end
+-- needed (different in context)
+
+function otf.scriptandlanguage(tfmdata)
+ return tfmdata.script, tfmdata.language
+end
+
-- bonus
function fonts.otf.nametoslot(name)
diff --git a/tex/context/base/font-enc.lua b/tex/context/base/font-enc.lua
index 648ccf4fb..000fb21bb 100644
--- a/tex/context/base/font-enc.lua
+++ b/tex/context/base/font-enc.lua
@@ -19,7 +19,7 @@ local enc = fonts.enc
enc.version = 1.03
enc.cache = containers.define("fonts", "enc", fonts.enc.version, true)
-enc.known = { -- sort of obsolete
+enc.known = utilities.storage.allocate { -- sort of obsolete
texnansi = true,
ec = true,
qx = true,
@@ -66,7 +66,7 @@ function enc.load(filename)
return data
end
local vector, tag, hash, unicodes = { }, "", { }, { }
- local foundname = resolvers.find_file(filename,'enc')
+ local foundname = resolvers.findfile(filename,'enc')
if foundname and foundname ~= "" then
local ok, encoding, size = resolvers.loadbinfile(foundname)
if ok and encoding then
@@ -91,11 +91,11 @@ function enc.load(filename)
end
end
local data = {
- name=name,
- tag=tag,
- vector=vector,
- hash=hash,
- unicodes=unicodes
+ name = name,
+ tag = tag,
+ vector = vector,
+ hash = hash,
+ unicodes = unicodes
}
return containers.write(enc.cache, name, data)
end
diff --git a/tex/context/base/font-ext.lua b/tex/context/base/font-ext.lua
index 9888a578e..e083f1edb 100644
--- a/tex/context/base/font-ext.lua
+++ b/tex/context/base/font-ext.lua
@@ -12,6 +12,8 @@ local gmatch, concat = string.gmatch, table.concat
local utfchar = utf.char
local getparameters = utilities.parsers.getparameters
+local allocate = utilities.storage.allocate
+
local trace_protrusion = false trackers.register("fonts.protrusion", function(v) trace_protrusion = v end)
local trace_expansion = false trackers.register("fonts.expansion", function(v) trace_expansion = v end)
@@ -124,13 +126,13 @@ end
-- expansion (hz)
-- -- -- -- -- --
-fonts.expansions = fonts.expansions or { }
+fonts.expansions = allocate()
local expansions = fonts.expansions
-expansions.classes = expansions.classes or { }
-expansions.vectors = expansions.vectors or { }
-
+expansions.classes = allocate()
local classes = expansions.classes
+
+expansions.vectors = allocate()
local vectors = expansions.vectors
-- beware, pdftex itself uses percentages * 10
@@ -231,13 +233,13 @@ local report_opbd = logs.new("otf opbd")
-- protrusion
-- -- -- -- -- --
-fonts.protrusions = fonts.protrusions or { }
+fonts.protrusions = allocate()
local protrusions = fonts.protrusions
-protrusions.classes = protrusions.classes or { }
+protrusions.classes = allocate()
protrusions.vectors = protrusions.vectors or { }
-local classes = protrusions.classes
+local classes = allocate()
local vectors = protrusions.vectors
-- the values need to be revisioned
@@ -390,7 +392,7 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd)
factor = tonumber(value) or 1
end
if opbd ~= "right" then
- local validlookups, lookuplist = fonts.otf.collect_lookups(otfdata,"lfbd",script,language)
+ local validlookups, lookuplist = otf.collectlookups(otfdata,"lfbd",script,language)
if validlookups then
for i=1,#lookuplist do
local lookup = lookuplist[i]
@@ -413,7 +415,7 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd)
end
end
if opbd ~= "left" then
- local validlookups, lookuplist = fonts.otf.collect_lookups(otfdata,"rtbd",script,language)
+ local validlookups, lookuplist = otf.collectlookups(otfdata,"rtbd",script,language)
if validlookups then
for i=1,#lookuplist do
local lookup = lookuplist[i]
@@ -634,8 +636,8 @@ end
methods.node.otf.formatters = processformatters
methods.base.otf.formatters = processformatters
-fonts.otf.tables.features['formatters'] = 'Hide Formatting Characters'
+otf.tables.features['formatters'] = 'Hide Formatting Characters'
-fonts.otf.features.register("formatters")
+otf.features.register("formatters")
table.insert(manipulators,"formatters") -- at end
diff --git a/tex/context/base/font-fbk.lua b/tex/context/base/font-fbk.lua
index d4692e341..fe8f64550 100644
--- a/tex/context/base/font-fbk.lua
+++ b/tex/context/base/font-fbk.lua
@@ -10,6 +10,8 @@ local cos, tan, rad, format = math.cos, math.tan, math.rad, string.format
local trace_combining = false trackers.register("fonts.combining", function(v) trace_combining = v end)
+local allocate = utilities.storage.allocate
+
--[[ldx--
<p>This is very experimental code!</p>
--ldx]]--
@@ -18,7 +20,7 @@ local fonts = fonts
local vf = fonts.vf
local tfm = fonts.tfm
-fonts.fallbacks = fonts.fallbacks or { }
+fonts.fallbacks = allocate()
local fallbacks = fonts.fallbacks
local commands = vf.aux.combine.commands
@@ -327,7 +329,7 @@ commands["disable-force"] = function(g,v)
force_fallback = false
end
-local install = fonts.define.methods.install
+local install = fonts.definers.methods.install
install("fallback", { -- todo: auto-fallback with loop over data.characters
{ "fake-character", 0x00A2, 'textcent' },
diff --git a/tex/context/base/font-gds.lua b/tex/context/base/font-gds.lua
index 80dc0ca55..ce1184292 100644
--- a/tex/context/base/font-gds.lua
+++ b/tex/context/base/font-gds.lua
@@ -13,6 +13,8 @@ local trace_goodies = false trackers.register("fonts.goodies", function(v) trac
local report_fonts = logs.new("fonts")
+local allocate = utilities.storage.allocate
+
-- goodies=name,colorscheme=,featureset=
--
-- goodies=auto
@@ -23,10 +25,10 @@ local node = node
fonts.goodies = fonts.goodies or { }
local fontgoodies = fonts.goodies
-fontgoodies.data = fontgoodies.data or { }
+fontgoodies.data = allocate() -- fontgoodies.data or { }
local data = fontgoodies.data
-fontgoodies.list = fontgoodies.list or { }
+fontgoodies.list = fontgoodies.list or { } -- no allocate as we want to see what is there
local list = fontgoodies.list
function fontgoodies.report(what,trace,goodies)
@@ -43,9 +45,9 @@ local function getgoodies(filename) -- maybe a merge is better
if goodies ~= nil then
-- found or tagged unfound
elseif type(filename) == "string" then
- local fullname = resolvers.find_file(file.addsuffix(filename,"lfg")) or "" -- prefered suffix
+ local fullname = resolvers.findfile(file.addsuffix(filename,"lfg")) or "" -- prefered suffix
if fullname == "" then
- fullname = resolvers.find_file(file.addsuffix(filename,"lua")) or "" -- fallback suffix
+ fullname = resolvers.findfile(file.addsuffix(filename,"lua")) or "" -- fallback suffix
end
if fullname == "" then
report_fonts("goodie file '%s.lfg' is not found",filename)
@@ -77,7 +79,7 @@ fontgoodies.get = getgoodies
-- register goodies file
-local preset_context = fonts.define.specify.preset_context
+local presetcontext = fonts.definers.specifiers.presetcontext
local function setgoodies(tfmdata,value)
local goodies = tfmdata.goodies or { } -- future versions might store goodies in the cached instance
@@ -124,7 +126,7 @@ function fontgoodies.prepare_features(goodies,name,set)
if set then
local ff = flattenedfeatures(set)
local fullname = goodies.name .. "::" .. name
- local n, s = preset_context(fullname,"",ff)
+ local n, s = presetcontext(fullname,"",ff)
goodies.featuresets[name] = s -- set
if trace_goodies then
report_fonts("feature set '%s' gets number %s and name '%s'",name,n,fullname)
@@ -209,7 +211,7 @@ local function set_colorscheme(tfmdata,scheme)
end
local fontdata = fonts.ids
-local fcs = fonts.color.set
+local fcs = fonts.colors.set
local has_attribute = node.has_attribute
local traverse_id = node.traverse_id
local a_colorscheme = attributes.private('colorscheme')
diff --git a/tex/context/base/font-ini.lua b/tex/context/base/font-ini.lua
index 210edbbe8..e068dae55 100644
--- a/tex/context/base/font-ini.lua
+++ b/tex/context/base/font-ini.lua
@@ -14,6 +14,7 @@ local utf = unicode.utf8
local format, serialize = string.format, table.serialize
local write_nl = texio.write_nl
local lower = string.lower
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local report_define = logs.new("define fonts")
@@ -26,9 +27,11 @@ fonts = fonts or { }
-- we will also have des and fam hashes
-fonts.ids = fonts.ids or { } fonts.identifiers = fonts.ids -- aka fontdata
-fonts.chr = fonts.chr or { } fonts.characters = fonts.chr -- aka chardata
-fonts.qua = fonts.qua or { } fonts.quads = fonts.qua -- aka quaddata
+-- beware, some already defined
+
+fonts.ids = mark(fonts.ids or { }) fonts.identifiers = fonts.ids -- aka fontdata
+fonts.chr = mark(fonts.chr or { }) fonts.characters = fonts.chr -- aka chardata
+fonts.qua = mark(fonts.qua or { }) fonts.quads = fonts.qua -- aka quaddata
fonts.tfm = fonts.tfm or { }
fonts.vf = fonts.vf or { }
@@ -36,7 +39,7 @@ fonts.afm = fonts.afm or { }
fonts.pfb = fonts.pfb or { }
fonts.otf = fonts.otf or { }
-fonts.private = 0xF0000 -- 0x10FFFF
+fonts.privateoffset = 0xF0000 -- 0x10FFFF
fonts.verbose = false -- more verbose cache tables
fonts.ids[0] = { -- nullfont
@@ -70,15 +73,15 @@ fonts.processors = fonts.processors or {
fonts.manipulators = fonts.manipulators or {
}
-fonts.define = fonts.define or { }
-fonts.define.specify = fonts.define.specify or { }
-fonts.define.specify.synonyms = fonts.define.specify.synonyms or { }
+fonts.definers = fonts.definers or { }
+fonts.definers.specifiers = fonts.definers.specifiers or { }
+fonts.definers.specifiers.synonyms = fonts.definers.specifiers.synonyms or { }
-- tracing
if not fonts.color then
- fonts.color = {
+ fonts.color = allocate {
set = function() end,
reset = function() end,
}
@@ -87,7 +90,7 @@ end
-- format identification
-fonts.formats = { }
+fonts.formats = allocate()
function fonts.fontformat(filename,default)
local extname = lower(file.extname(filename))
diff --git a/tex/context/base/font-ini.mkiv b/tex/context/base/font-ini.mkiv
index 4b813d7f6..2987bd36d 100644
--- a/tex/context/base/font-ini.mkiv
+++ b/tex/context/base/font-ini.mkiv
@@ -59,6 +59,7 @@
\registerctxluafile{font-clr}{1.001}
\registerctxluafile{node-fnt}{1.001} % here
\registerctxluafile{font-enc}{1.001}
+\registerctxluafile{font-agl}{1.001} % uses enc, needed in map
\registerctxluafile{font-map}{1.001}
\registerctxluafile{font-syn}{1.001}
\registerctxluafile{font-log}{1.001}
@@ -72,7 +73,7 @@
\registerctxluafile{font-oti}{1.001} % otf initialization
\registerctxluafile{font-otb}{1.001} % otf main base
\registerctxluafile{font-otn}{1.001} % otf main node
-\registerctxluafile{font-ota}{1.001} % otf analyzers
+\registerctxluafile{font-ota}{1.001} % otf analyzers (needs dynamics)
\registerctxluafile{font-otp}{1.001} % otf pack
\registerctxluafile{font-otc}{1.001} % otf context
\registerctxluafile{font-oth}{1.001} % otf helpers
@@ -85,7 +86,6 @@
\registerctxluafile{font-ext}{1.001}
\registerctxluafile{font-pat}{1.001}
\registerctxluafile{font-chk}{1.001}
-\registerctxluafile{font-agl}{1.001}
\unprotect
@@ -699,7 +699,7 @@
\def\lowleveldefinefont#1#2% #2 = cs
{%
- \ctxlua{fonts.define.command_1("\luaescapestring{#1}")}% the escapestring catches at \somedimen
+ \ctxlua{fonts.definers.stage_one("\luaescapestring{#1}")}% the escapestring catches at \somedimen
% sets \scaledfontmode and \somefontname and \somefontsize
\ifcase\scaledfontmode\relax
% none, avoid the designsize if possible
@@ -730,7 +730,7 @@
\fi
\updatefontparameters
\updatefontclassparameters
- \ctxlua{fonts.define.command_2(
+ \ctxlua{fonts.definers.stage_two(
\ifx\fontclass\empty false\else true\fi,
"#2", % cs, trailing % is gone
"\somefontfile",
@@ -2713,7 +2713,7 @@
\def\dodefinefontfeature[#1][#2][#3]%
{\global\expandafter\chardef\csname\??fq=#1\endcsname % beware () needed as we get two values returned
- \ctxlua{tex.write((fonts.define.specify.preset_context("#1","#2","#3")))}\relax}
+ \ctxlua{tex.write((fonts.definers.specifiers.presetcontext("#1","#2","#3")))}\relax}
\definefontfeature
[default]
@@ -2798,7 +2798,7 @@
{\dodoubleargument\dofontfeatureslist}
\def\dofontfeatureslist[#1][#2]% todo: arg voor type
- {\ctxlua{tex.sprint(tex.ctxcatcodes,fonts.define.specify.context_tostring("#1","otf","\luaescapestring{#2}","yes","no",true,{"number"}))}}
+ {\ctxlua{tex.sprint(tex.ctxcatcodes,fonts.definers.specifiers.contexttostring("#1","otf","\luaescapestring{#2}","yes","no",true,{"number"}))}}
\attribute\zerocount\zerocount % first in list, so fast match
@@ -2819,7 +2819,7 @@
%
% \typebuffer \getbuffer
-\def\featureattribute#1{\ctxlua{tex.sprint(fonts.define.specify.context_number("#1"))}}
+\def\featureattribute#1{\ctxlua{tex.sprint(fonts.definers.specifiers.contextnumber("#1"))}}
\def\setfontfeature #1{\edef\currentfeature{#1}\attribute\zerocount\featureattribute{#1}\relax}
\def\resetfontfeature#1{\let\currentfeature\empty\attribute\zerocount\zerocount} % initial value
diff --git a/tex/context/base/font-map.lua b/tex/context/base/font-map.lua
index 6230ee326..5fa4170d7 100644
--- a/tex/context/base/font-map.lua
+++ b/tex/context/base/font-map.lua
@@ -27,9 +27,9 @@ of obsolete. Some code may move to runtime or auxiliary modules.</p>
local fonts = fonts
fonts.map = fonts.map or { }
-local function load_lum_table(filename) -- will move to font goodies
+local function loadlumtable(filename) -- will move to font goodies
local lumname = file.replacesuffix(file.basename(filename),"lum")
- local lumfile = resolvers.find_file(lumname,"map") or ""
+ local lumfile = resolvers.findfile(lumname,"map") or ""
if lumfile ~= "" and lfs.isfile(lumfile) then
if trace_loading or trace_unimapping then
report_otf("enhance: loading %s ",lumfile)
@@ -54,7 +54,7 @@ local parser = unicode + ucode + index
local parsers = { }
-local function make_name_parser(str)
+local function makenameparser(str)
if not str or str == "" then
return parser
else
@@ -67,8 +67,8 @@ local function make_name_parser(str)
end
end
---~ local parser = fonts.map.make_name_parser("Japan1")
---~ local parser = fonts.map.make_name_parser()
+--~ local parser = fonts.map.makenameparser("Japan1")
+--~ local parser = fonts.map.makenameparser()
--~ local function test(str)
--~ local b, a = lpegmatch(parser,str)
--~ print((a and table.serialize(b)) or b)
@@ -122,8 +122,8 @@ end
--~ return s
--~ end
-fonts.map.load_lum_table = load_lum_table
-fonts.map.make_name_parser = make_name_parser
+fonts.map.loadlumtable = loadlumtable
+fonts.map.makenameparser = makenameparser
fonts.map.tounicode16 = tounicode16
fonts.map.tounicode16sequence = tounicode16sequence
@@ -137,7 +137,7 @@ local ligsplitter = Ct(other * (separator * other)^0)
--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more")))
--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that")))
-fonts.map.add_to_unicode = function(data,filename)
+fonts.map.addtounicode = function(data,filename)
local unicodes = data.luatex and data.luatex.unicodes
if not unicodes then
return
@@ -148,11 +148,11 @@ fonts.map.add_to_unicode = function(data,filename)
unicodes['zwj'] = unicodes['zwj'] or 0x200D
unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
-- the tounicode mapping is sparse and only needed for alternatives
- local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.private, format("%04X",utfbyte("?"))
+ local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.privateoffset, format("%04X",utfbyte("?"))
data.luatex.tounicode, data.luatex.originals = tounicode, originals
local lumunic, uparser, oparser
if false then -- will become an option
- lumunic = load_lum_table(filename)
+ lumunic = loadlumtable(filename)
lumunic = lumunic and lumunic.tounicode
end
local cidinfo, cidnames, cidcodes = data.cidinfo
@@ -160,12 +160,12 @@ fonts.map.add_to_unicode = function(data,filename)
usedmap = usedmap and lower(usedmap)
usedmap = usedmap and fonts.cid.map[usedmap]
if usedmap then
- oparser = usedmap and make_name_parser(cidinfo.ordering)
+ oparser = usedmap and makenameparser(cidinfo.ordering)
cidnames = usedmap.names
cidcodes = usedmap.unicodes
end
- uparser = make_name_parser()
- local aglmap = fonts.map and fonts.map.agl_to_unicode
+ uparser = makenameparser()
+ local aglmap = fonts.enc.agl and fonts.enc.unicodes -- to name
for index, glyph in next, data.glyphs do
local name, unic = glyph.name, glyph.unicode or -1 -- play safe
if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua
index 7a2653856..645278837 100644
--- a/tex/context/base/font-mis.lua
+++ b/tex/context/base/font-mis.lua
@@ -38,7 +38,7 @@ local featuregroups = { "gsub", "gpos" }
function fonts.get_features(name,t,script,language)
local t = lower(t or (name and file.extname(name)) or "")
if t == "otf" or t == "ttf" or t == "ttc" or t == "dfont" then
- local filename = resolvers.find_file(name,t) or ""
+ local filename = resolvers.findfile(name,t) or ""
if filename ~= "" then
local data = fonts.otf.loadcached(filename)
if data and data.luatex and data.luatex.features then
diff --git a/tex/context/base/font-ota.lua b/tex/context/base/font-ota.lua
index 0ec7bac73..73e2c578b 100644
--- a/tex/context/base/font-ota.lua
+++ b/tex/context/base/font-ota.lua
@@ -43,23 +43,18 @@ local traverse_node_list = node.traverse
local fontdata = fonts.ids
local state = attributes.private('state')
-local fcs = (fonts.color and fonts.color.set) or function() end
-local fcr = (fonts.color and fonts.color.reset) or function() end
-
-local a_to_script = otf.a_to_script
-local a_to_language = otf.a_to_language
+local fontscolors = fonts.colors
+local fcs = (fontscolors and fontscolors.set) or function() end
+local fcr = (fontscolors and fontscolors.reset) or function() end
-- in the future we will use language/script attributes instead of the
-- font related value, but then we also need dynamic features which is
-- somewhat slower; and .. we need a chain of them
+local scriptandlanguage = otf.scriptandlanguage
+
function fonts.initializers.node.otf.analyze(tfmdata,value,attr)
- local script, language
- if attr and attr > 0 then
- script, language = a_to_script[attr], a_to_language[attr]
- else
- script, language = tfmdata.script, tfmdata.language
- end
+ local script, language = scriptandlanguage(tfmdata,attr)
local action = initializers[script]
if action then
if type(action) == "function" then
@@ -76,12 +71,7 @@ end
function fonts.methods.node.otf.analyze(head,font,attr)
local tfmdata = fontdata[font]
- local script, language
- if attr and attr > 0 then
- script, language = a_to_script[attr], a_to_language[attr]
- else
- script, language = tfmdata.script, tfmdata.language
- end
+ local script, language = scriptandlanguage(tfmdata,attr)
local action = methods[script]
if action then
if type(action) == "function" then
diff --git a/tex/context/base/font-otb.lua b/tex/context/base/font-otb.lua
index ea46ebdbc..e4d694ea9 100644
--- a/tex/context/base/font-otb.lua
+++ b/tex/context/base/font-otb.lua
@@ -154,7 +154,7 @@ local splitter = lpeg.splitat(" ")
local function prepare_base_substitutions(tfmdata,kind,value) -- we can share some code with the node features
if value then
local otfdata = tfmdata.shared.otfdata
- local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
+ local validlookups, lookuplist = otf.collectlookups(otfdata,kind,tfmdata.script,tfmdata.language)
if validlookups then
local ligatures = { }
local unicodes = tfmdata.unicodes -- names to unicodes
@@ -266,7 +266,7 @@ end
local function preparebasekerns(tfmdata,kind,value) -- todo what kind of kerns, currently all
if value then
local otfdata = tfmdata.shared.otfdata
- local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
+ local validlookups, lookuplist = otf.collectlookups(otfdata,kind,tfmdata.script,tfmdata.language)
if validlookups then
local unicodes = tfmdata.unicodes -- names to unicodes
local indices = tfmdata.indices
diff --git a/tex/context/base/font-otd.lua b/tex/context/base/font-otd.lua
index f23ef8eb4..910725a9f 100644
--- a/tex/context/base/font-otd.lua
+++ b/tex/context/base/font-otd.lua
@@ -10,23 +10,24 @@ local trace_dynamics = false trackers.register("otf.dynamics", function(v) trac
local report_otf = logs.new("load otf")
-local fonts = fonts
-local otf = fonts.otf
-local fontdata = fonts.ids
+local fonts = fonts
+local otf = fonts.otf
+local fontdata = fonts.ids
otf.features = otf.features or { }
otf.features.default = otf.features.default or { }
-local context_setups = fonts.define.specify.context_setups
-local context_numbers = fonts.define.specify.context_numbers
+local definers = fonts.definers
+local contextsetups = definers.specifiers.contextsetups
+local contextnumbers = definers.specifiers.contextnumbers
-- todo: dynamics namespace
-local a_to_script = { } otf.a_to_script = a_to_script
-local a_to_language = { } otf.a_to_language = a_to_language
+local a_to_script = { }
+local a_to_language = { }
function otf.setdynamics(font,dynamics,attribute)
- local features = context_setups[context_numbers[attribute]] -- can be moved to caller
+ local features = contextsetups[contextnumbers[attribute]] -- can be moved to caller
if features then
local script = features.script or 'dflt'
local language = features.language or 'dflt'
@@ -43,7 +44,7 @@ function otf.setdynamics(font,dynamics,attribute)
local dsla = dsl[attribute]
if dsla then
-- if trace_dynamics then
- -- report_otf("using dynamics %s: attribute %s, script %s, language %s",context_numbers[attribute],attribute,script,language)
+ -- report_otf("using dynamics %s: attribute %s, script %s, language %s",contextnumbers[attribute],attribute,script,language)
-- end
return dsla
else
@@ -63,10 +64,10 @@ function otf.setdynamics(font,dynamics,attribute)
tfmdata.script = script
tfmdata.shared.features = { }
-- end of save
- local set = fonts.define.check(features,otf.features.default)
+ local set = definers.check(features,otf.features.default)
dsla = otf.setfeatures(tfmdata,set)
if trace_dynamics then
- report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",context_numbers[attribute],attribute,script,language,table.sequenced(set))
+ report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",contextnumbers[attribute],attribute,script,language,table.sequenced(set))
end
-- we need to restore some values
tfmdata.script = saved.script
@@ -80,3 +81,11 @@ function otf.setdynamics(font,dynamics,attribute)
end
return nil -- { }
end
+
+function otf.scriptandlanguage(tfmdata,attr)
+ if attr and attr > 0 then
+ return a_to_script[attr] or tfmdata.script, a_to_language[attr] or tfmdata.language
+ else
+ return tfmdata.script, tfmdata.language
+ end
+end
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index cda1cfdd2..1da295eb0 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -15,6 +15,8 @@ local abs = math.abs
local getn = table.getn
local lpegmatch = lpeg.match
+local allocate = utilities.storage.allocate
+
local trace_private = false trackers.register("otf.private", function(v) trace_private = v end)
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
local trace_features = false trackers.register("otf.features", function(v) trace_features = v end)
@@ -79,9 +81,11 @@ otf.features = otf.features or { }
otf.features.list = otf.features.list or { }
otf.features.default = otf.features.default or { }
-otf.enhancers = otf.enhancers or { }
+otf.enhancers = allocate()
local enhancers = otf.enhancers
+local definers = fonts.definers
+
otf.glists = { "gsub", "gpos" }
otf.version = 2.653 -- beware: also sync font-mis.lua
@@ -89,7 +93,6 @@ otf.pack = true -- beware: also sync font-mis.lua
otf.syncspace = true
otf.notdef = false
otf.cache = containers.define("fonts", "otf", otf.version, true)
-otf.cleanup_aat = false -- only context
local wildcard = "*"
local default = "dflt"
@@ -100,7 +103,7 @@ local default = "dflt"
-- we can have more local functions
-otf.tables.global_fields = table.tohash {
+otf.tables.global_fields = allocate( table.tohash {
"lookups",
"glyphs",
"subfonts",
@@ -116,9 +119,9 @@ otf.tables.global_fields = table.tohash {
"kern_classes",
"gpos",
"gsub"
-}
+} )
-otf.tables.valid_fields = {
+otf.tables.valid_fields = allocate( {
"anchor_classes",
"ascent",
"cache_version",
@@ -176,21 +179,18 @@ otf.tables.valid_fields = {
"weight",
"weight_width_slope_only",
"xuid",
-}
+} )
--[[ldx--
<p>Here we go.</p>
--ldx]]--
local function load_featurefile(ff,featurefile)
- if featurefile then
- featurefile = resolvers.find_file(file.addsuffix(featurefile,'fea'),'fea')
- if featurefile and featurefile ~= "" then
- if trace_loading then
- report_otf("featurefile: %s", featurefile)
- end
- fontloader.apply_featurefile(ff, featurefile)
+ if featurefile and featurefile ~= "" then
+ if trace_loading then
+ report_otf("featurefile: %s", featurefile)
end
+ fontloader.apply_featurefile(ff, featurefile)
end
end
@@ -215,8 +215,8 @@ local ordered_enhancers = { -- implemented later
"flatten glyph lookups", "flatten anchor tables", "flatten feature tables",
"simplify glyph lookups", -- some saving
"prepare luatex tables",
- "analyse features", "rehash features",
- "analyse anchors", "analyse marks", "analyse unicodes", "analyse subtables",
+ "analyze features", "rehash features",
+ "analyze anchors", "analyze marks", "analyze unicodes", "analyze subtables",
"check italic correction","check math",
"share widths",
"strip not needed data",
@@ -224,7 +224,7 @@ local ordered_enhancers = { -- implemented later
"check math parameters",
}
-local add_dimensions, show_feature_order -- implemented later
+local adddimensions, showfeatureorder -- implemented later
function otf.load(filename,format,sub,featurefile)
local name = file.basename(file.removesuffix(filename))
@@ -239,8 +239,50 @@ function otf.load(filename,format,sub,featurefile)
hash = hash .. "-" .. sub
end
hash = containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles = { }
+ for s in gmatch(featurefile,"[^,]+") do
+ local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name == "" then
+ report_otf("loading: no featurefile '%s'",s)
+ else
+ local attr = lfs.attributes(name)
+ featurefiles[#featurefiles+1] = {
+ name = name,
+ size = attr.size or 0,
+ time = attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles == 0 then
+ featurefiles = nil
+ end
+ end
local data = containers.read(otf.cache,hash)
- if not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time then
+ local reload = not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time
+ if not reload then
+ local featuredata = data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata ~= #featurefiles then
+ reload = true
+ else
+ for i=1,#featurefiles do
+ local fi, fd = featurefiles[i], featuredata[i]
+ if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then
+ reload = true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload = true
+ end
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification: %s",featurefile or "--")
+ end
+ end
+ if reload then
report_otf("loading: %s (hash: %s)",filename,hash)
local ff, messages
if sub then
@@ -260,7 +302,11 @@ function otf.load(filename,format,sub,featurefile)
report_otf("font loaded okay")
end
if ff then
- load_featurefile(ff,featurefile)
+ if featurefiles then
+ for i=1,#featurefiles do
+ load_featurefile(ff,featurefiles[i].name)
+ end
+ end
data = fontloader.to_table(ff)
fontloader.close(ff)
if data then
@@ -275,6 +321,9 @@ function otf.load(filename,format,sub,featurefile)
end
data.size = size
data.time = time
+ if featurefiles then
+ data.featuredata = featurefiles
+ end
data.verbose = fonts.verbose
report_otf("saving in cache: %s",filename)
data = containers.write(otf.cache, hash, data)
@@ -293,15 +342,15 @@ function otf.load(filename,format,sub,featurefile)
report_otf("loading from cache: %s",hash)
end
enhance("unpack",data,filename,false) -- no message here
- add_dimensions(data)
+ adddimensions(data)
if trace_sequences then
- show_feature_order(data,filename)
+ showfeatureorder(data,filename)
end
end
return data
end
-add_dimensions = function(data)
+adddimensions = function(data)
-- todo: forget about the width if it's the defaultwidth (saves mem)
-- we could also build the marks hash here (instead of storing it)
if data then
@@ -337,7 +386,7 @@ add_dimensions = function(data)
end
end
-local function show_feature_order(otfdata,filename)
+local function showfeatureorder(otfdata,filename)
local sequences = otfdata.luatex.sequences
if sequences and #sequences > 0 then
if trace_loading then
@@ -410,11 +459,6 @@ enhancers["prepare luatex tables"] = function(data,filename)
luatex.creator = "context mkiv"
end
-enhancers["cleanup aat"] = function(data,filename)
- if otf.cleanup_aat then
- end
-end
-
local function analyze_features(g, features)
if g then
local t, done = { }, { }
@@ -438,7 +482,7 @@ local function analyze_features(g, features)
return nil
end
-enhancers["analyse features"] = function(data,filename)
+enhancers["analyze features"] = function(data,filename)
-- local luatex = data.luatex
-- luatex.gposfeatures = analyze_features(data.gpos)
-- luatex.gsubfeatures = analyze_features(data.gsub)
@@ -475,7 +519,7 @@ enhancers["rehash features"] = function(data,filename)
end
end
-enhancers["analyse anchors"] = function(data,filename)
+enhancers["analyze anchors"] = function(data,filename)
local classes = data.anchor_classes
local luatex = data.luatex
local anchor_to_lookup, lookup_to_anchor = { }, { }
@@ -501,7 +545,7 @@ enhancers["analyse anchors"] = function(data,filename)
end
end
-enhancers["analyse marks"] = function(data,filename)
+enhancers["analyze marks"] = function(data,filename)
local glyphs = data.glyphs
local marks = { }
data.luatex.marks = marks
@@ -513,9 +557,9 @@ enhancers["analyse marks"] = function(data,filename)
end
end
-enhancers["analyse unicodes"] = fonts.map.add_to_unicode
+enhancers["analyze unicodes"] = fonts.map.addtounicode
-enhancers["analyse subtables"] = function(data,filename)
+enhancers["analyze subtables"] = function(data,filename)
data.luatex = data.luatex or { }
local luatex = data.luatex
local sequences = { }
@@ -654,8 +698,8 @@ enhancers["prepare unicode"] = function(data,filename)
else
mapmap = mapmap.map
end
- local criterium = fonts.private
- local private = fonts.private
+ local criterium = fonts.privateoffset
+ local private = criterium
for index, glyph in next, glyphs do
if index > 0 then
local name = glyph.name
@@ -1360,7 +1404,7 @@ enhancers["flatten feature tables"] = function(data,filename)
end
end
-enhancers.patches = enhancers.patches or { }
+enhancers.patches = allocate()
enhancers["patch bugs"] = function(data,filename)
local basename = file.basename(lower(filename))
@@ -1575,7 +1619,7 @@ local function copytotfm(data,cache_id) -- we can save a copy when we reorder th
end
spaceunits = tonumber(spaceunits) or tfm.units/2 -- 500 -- brrr
-- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
- local filename = fonts.tfm.checked_filename(luatex)
+ local filename = fonts.tfm.checkedfilename(luatex)
local fontname = metadata.fontname
local fullname = metadata.fullname or fontname
local cidinfo = data.cidinfo
@@ -1687,7 +1731,7 @@ local function otftotfm(specification)
tfmdata.has_italic = otfdata.metadata.has_italic
if not tfmdata.language then tfmdata.language = 'dflt' end
if not tfmdata.script then tfmdata.script = 'dflt' end
- shared.processes, shared.features = otf.setfeatures(tfmdata,fonts.define.check(features,otf.features.default))
+ shared.processes, shared.features = otf.setfeatures(tfmdata,definers.check(features,otf.features.default))
end
end
containers.write(tfm.cache,cache_id,tfmdata)
@@ -1697,7 +1741,7 @@ end
otf.features.register('mathsize')
-function tfm.read_from_open_type(specification) -- wrong namespace
+function tfm.read_from_otf(specification) -- wrong namespace
local tfmtable = otftotfm(specification)
if tfmtable then
local otfdata = tfmtable.shared.otfdata
@@ -1735,7 +1779,7 @@ function tfm.read_from_open_type(specification) -- wrong namespace
end
end
tfmtable = tfm.scale(tfmtable,s,specification.relativeid)
- if tfm.fontname_mode == "specification" then
+ if tfm.fontnamemode == "specification" then
-- not to be used in context !
local specname = specification.specification
if specname then
@@ -1753,7 +1797,7 @@ end
-- helpers
-function otf.collect_lookups(otfdata,kind,script,language)
+function otf.collectlookups(otfdata,kind,script,language)
-- maybe store this in the font
local sequences = otfdata.luatex.sequences
if sequences then
diff --git a/tex/context/base/font-oth.lua b/tex/context/base/font-oth.lua
index 448d54b90..d1a68d809 100644
--- a/tex/context/base/font-oth.lua
+++ b/tex/context/base/font-oth.lua
@@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['font-oth'] = {
local lpegmatch = lpeg.match
local splitter = lpeg.Ct(lpeg.splitat(" "))
-local collect_lookups = fonts.otf.collect_lookups
+local collectlookups = fonts.otf.collectlookups
-- For the moment there is no need to cache this but this might
-- happen when I get the feeling that there is a performance
@@ -20,7 +20,7 @@ function fonts.otf.getalternate(tfmdata,k,kind,value)
local shared = tfmdata.shared
local otfdata = shared and shared.otfdata
if otfdata then
- local validlookups, lookuplist = collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
+ local validlookups, lookuplist = collectlookups(otfdata,kind,tfmdata.script,tfmdata.language)
if validlookups then
local lookups = tfmdata.descriptions[k].slookups -- we assume only slookups (we can always extend)
if lookups then
diff --git a/tex/context/base/font-oti.lua b/tex/context/base/font-oti.lua
index a5bbabf5c..e531ba8b2 100644
--- a/tex/context/base/font-oti.lua
+++ b/tex/context/base/font-oti.lua
@@ -13,11 +13,8 @@ local fonts = fonts
local otf = fonts.otf
local initializers = fonts.initializers
-otf.default_language = 'latn'
-otf.default_script = 'dflt'
-
-local languages = otf.tables.languages
-local scripts = otf.tables.scripts
+local languages = otf.tables.languages
+local scripts = otf.tables.scripts
local function set_language(tfmdata,value)
if value then
diff --git a/tex/context/base/font-otn.lua b/tex/context/base/font-otn.lua
index a04d13758..4109ca2d0 100644
--- a/tex/context/base/font-otn.lua
+++ b/tex/context/base/font-otn.lua
@@ -200,10 +200,11 @@ local curscurs = attributes.private('curscurs')
local cursdone = attributes.private('cursdone')
local kernpair = attributes.private('kernpair')
-local set_mark = nodes.set_mark
-local set_cursive = nodes.set_cursive
-local set_kern = nodes.set_kern
-local set_pair = nodes.set_pair
+local injections = nodes.injections
+local setmark = injections.setmark
+local setcursive = injections.setcursive
+local setkern = injections.setkern
+local setpair = injections.setpair
local markonce = true
local cursonce = true
@@ -232,9 +233,10 @@ local featurevalue = false
-- we cheat a bit and assume that a font,attr combination are kind of ranged
-local context_setups = fonts.define.specify.context_setups
-local context_numbers = fonts.define.specify.context_numbers
-local context_merged = fonts.define.specify.context_merged
+local specifiers = fonts.definers.specifiers
+local contextsetups = specifiers.contextsetups
+local contextnumbers = specifiers.contextnumbers
+local contextmerged = specifiers.contextmerged
-- we cannot optimize with "start = first_character(head)" because then we don't
-- know which rlmode we're in which messes up cursive handling later on
@@ -591,7 +593,7 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -606,7 +608,7 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- fonts.register_message(currentfont,basechar,"no base anchors")
+ fonts.registermessage(currentfont,basechar,"no base anchors")
end
elseif trace_bugs then
logwarning("%s: prev node is no char",pref(kind,lookupname))
@@ -659,7 +661,7 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
if ma then
ba = ba[index]
if ba then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma,index)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma,index)
if trace_marks then
logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -676,7 +678,7 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- fonts.register_message(currentfont,basechar,"no base anchors")
+ fonts.registermessage(currentfont,basechar,"no base anchors")
end
elseif trace_bugs then
logwarning("%s: prev node is no char",pref(kind,lookupname))
@@ -706,7 +708,7 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -722,7 +724,7 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- fonts.register_message(currentfont,basechar,"no base anchors")
+ fonts.registermessage(currentfont,basechar,"no base anchors")
end
elseif trace_bugs then
logwarning("%s: prev node is no mark",pref(kind,lookupname))
@@ -764,7 +766,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to
if al[anchor] then
local exit = exitanchors[anchor]
if exit then
- local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
if trace_cursive then
logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
end
@@ -777,7 +779,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- fonts.register_message(currentfont,startchar,"no entry anchors")
+ fonts.registermessage(currentfont,startchar,"no entry anchors")
end
break
end
@@ -794,7 +796,7 @@ end
function handlers.gpos_single(start,kind,lookupname,kerns,sequence)
local startchar = start.char
- local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ local dx, dy, w, h = setpair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
end
@@ -825,14 +827,14 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
local a, b = krn[3], krn[4]
if a and #a > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
@@ -841,7 +843,7 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
local a, b = krn[3], krn[7]
if a and a ~= 0 then
- local k = set_kern(snext,factor,rlmode,a)
+ local k = setkern(snext,factor,rlmode,a)
if trace_kerns then
logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
end
@@ -852,7 +854,7 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
end
done = true
elseif krn ~= 0 then
- local k = set_kern(snext,factor,rlmode,krn)
+ local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
end
@@ -1223,7 +1225,7 @@ function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,cach
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -1296,7 +1298,7 @@ function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext,
if ma then
ba = ba[index]
if ba then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma,index)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma,index)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -1348,7 +1350,7 @@ function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,cach
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -1414,7 +1416,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache,
if al[anchor] then
local exit = exitanchors[anchor]
if exit then
- local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
if trace_cursive then
logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
end
@@ -1427,7 +1429,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache,
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- fonts.register_message(currentfont,startchar,"no entry anchors")
+ fonts.registermessage(currentfont,startchar,"no entry anchors")
end
break
end
@@ -1453,7 +1455,7 @@ function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,cache,c
if kerns then
kerns = kerns[startchar]
if kerns then
- local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ local dx, dy, w, h = setpair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
end
@@ -1491,14 +1493,14 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur
local a, b = krn[3], krn[4]
if a and #a > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
@@ -1507,7 +1509,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur
report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
local a, b = krn[3], krn[7]
if a and a ~= 0 then
- local k = set_kern(snext,factor,rlmode,a)
+ local k = setkern(snext,factor,rlmode,a)
if trace_kerns then
logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
end
@@ -1518,7 +1520,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur
end
done = true
elseif krn ~= 0 then
- local k = set_kern(snext,factor,rlmode,krn)
+ local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
end
@@ -1922,8 +1924,8 @@ function fonts.methods.node.otf.features(head,font,attr)
local script, language, s_enabled, a_enabled, dyn
local attribute_driven = attr and attr ~= 0
if attribute_driven then
- local features = context_setups[context_numbers[attr]] -- could be a direct list
- dyn = context_merged[attr] or 0
+ local features = contextsetups[contextnumbers[attr]] -- could be a direct list
+ dyn = contextmerged[attr] or 0
language, script = features.language or "dflt", features.script or "dflt"
a_enabled = features -- shared.features -- can be made local to the resolver
if dyn == 2 or dyn == -2 then
diff --git a/tex/context/base/font-ott.lua b/tex/context/base/font-ott.lua
index 379032f18..3c3ecdee0 100644
--- a/tex/context/base/font-ott.lua
+++ b/tex/context/base/font-ott.lua
@@ -10,6 +10,8 @@ local type, next, tonumber, tostring = type, next, tonumber, tostring
local gsub, lower, format = string.gsub, string.lower, string.format
local is_boolean = string.is_boolean
+local allocate = utilities.storage.allocate
+
local fonts = fonts
fonts.otf = fonts.otf or { }
local otf = fonts.otf
@@ -20,7 +22,7 @@ local tables = otf.tables
otf.meanings = otf.meanings or { }
local meanings = otf.meanings
-local scripts = {
+local scripts = allocate {
['dflt'] = 'Default',
['arab'] = 'Arabic',
@@ -93,7 +95,7 @@ local scripts = {
['yi' ] = 'Yi',
}
-local languages = {
+local languages = allocate {
['dflt'] = 'Default',
['aba'] = 'Abaza',
@@ -487,7 +489,7 @@ local languages = {
['zul'] = 'Zulu'
}
-local features = {
+local features = allocate {
['aalt'] = 'Access All Alternates',
['abvf'] = 'Above-Base Forms',
['abvm'] = 'Above-Base Mark Positioning',
@@ -625,7 +627,7 @@ local features = {
['tlig'] = 'Traditional TeX Ligatures',
}
-local baselines = {
+local baselines = allocate {
['hang'] = 'Hanging baseline',
['icfb'] = 'Ideographic character face bottom edge baseline',
['icft'] = 'Ideographic character face tope edige baseline',
@@ -635,32 +637,32 @@ local baselines = {
['romn'] = 'Roman baseline'
}
-local to_scripts = table.swaphash(scripts )
-local to_languages = table.swaphash(languages)
-local to_features = table.swaphash(features )
+local verbosescripts = allocate(table.swaphash(scripts ))
+local verboselanguages = allocate(table.swaphash(languages))
+local verbosefeatures = allocate(table.swaphash(features ))
-tables.scripts = scripts
-tables.languages = languages
-tables.features = features
-tables.baselines = baselines
+tables.scripts = scripts
+tables.languages = languages
+tables.features = features
+tables.baselines = baselines
-tables.to_scripts = to_scripts
-tables.to_languages = to_languages
-tables.to_features = to_features
+tables.verbosescripts = verbosescripts
+tables.verboselanguages = verboselanguages
+tables.verbosefeatures = verbosefeatures
-for k, v in next, to_features do
+for k, v in next, verbosefeatures do
local stripped = gsub(k,"%-"," ")
- to_features[stripped] = v
+ verbosefeatures[stripped] = v
local stripped = gsub(k,"[^a-zA-Z0-9]","")
- to_features[stripped] = v
+ verbosefeatures[stripped] = v
end
-for k, v in next, to_features do
- to_features[lower(k)] = v
+for k, v in next, verbosefeatures do
+ verbosefeatures[lower(k)] = v
end
-- can be sped up by local tables
-function tables.to_tag(id)
+function tables.totag(id) -- not used
return format("%4s",lower(id))
end
@@ -694,14 +696,14 @@ function meanings.normalize(features)
if k == "language" or k == "lang" then
v = gsub(lower(v),"[^a-z0-9%-]","")
if not languages[v] then
- h.language = to_languages[v] or "dflt"
+ h.language = verboselanguages[v] or "dflt"
else
h.language = v
end
elseif k == "script" then
v = gsub(lower(v),"[^a-z0-9%-]","")
if not scripts[v] then
- h.script = to_scripts[v] or "dflt"
+ h.script = verbosescripts[v] or "dflt"
else
h.script = v
end
@@ -714,7 +716,7 @@ function meanings.normalize(features)
v = b
end
end
- k = to_features[k] or k
+ k = verbosefeatures[k] or k
local c = checkers[k]
h[k] = c and c(v) or v
end
diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua
index 39e3df5ea..03aa528be 100644
--- a/tex/context/base/font-syn.lua
+++ b/tex/context/base/font-syn.lua
@@ -17,6 +17,8 @@ local lpegmatch = lpeg.match
local utfgsub, utflower = utf.gsub, utf.lower
local unpack = unpack or table.unpack
+local allocate = utilities.storage.allocate
+
local trace_names = false trackers.register("fonts.names", function(v) trace_names = v end)
local trace_warnings = false trackers.register("fonts.warnings", function(v) trace_warnings = v end)
@@ -39,7 +41,7 @@ local filters = names.filters
names.data = names.data or { }
-names.version = 1.103
+names.version = 1.110
names.basename = "names"
names.saved = false
names.loaded = false
@@ -103,14 +105,14 @@ local variants = Cs( -- fax casual
local any = P(1)
-local analysed_table
+local analyzed_table
-local analyser = Cs (
+local analyzer = Cs (
(
- weights / function(s) analysed_table[1] = s return "" end
- + styles / function(s) analysed_table[2] = s return "" end
- + widths / function(s) analysed_table[3] = s return "" end
- + variants / function(s) analysed_table[4] = s return "" end
+ weights / function(s) analyzed_table[1] = s return "" end
+ + styles / function(s) analyzed_table[2] = s return "" end
+ + widths / function(s) analyzed_table[3] = s return "" end
+ + variants / function(s) analyzed_table[4] = s return "" end
+ any
)^0
)
@@ -137,11 +139,11 @@ function names.splitspec(askedname)
return name or askedname, weight, style, width, variant
end
-local function analysespec(somename)
+local function analyzespec(somename)
if somename then
- analysed_table = { }
- local name = lpegmatch(analyser,somename)
- return name, analysed_table[1], analysed_table[2], analysed_table[3], analysed_table[4]
+ analyzed_table = { }
+ local name = lpegmatch(analyzer,somename)
+ return name, analyzed_table[1], analyzed_table[2], analyzed_table[3], analyzed_table[4]
end
end
@@ -172,9 +174,9 @@ filters.otf = fontloader.fullinfo
function filters.afm(name)
-- we could parse the afm file as well, and then report an error but
-- it's not worth the trouble
- local pfbname = resolvers.find_file(file.removesuffix(name)..".pfb","pfb") or ""
+ local pfbname = resolvers.findfile(file.removesuffix(name)..".pfb","pfb") or ""
if pfbname == "" then
- pfbname = resolvers.find_file(file.removesuffix(file.basename(name))..".pfb","pfb") or ""
+ pfbname = resolvers.findfile(file.removesuffix(file.basename(name))..".pfb","pfb") or ""
end
if pfbname ~= "" then
local f = io.open(name)
@@ -211,8 +213,8 @@ filters.list = {
--~ "ttc", "otf", "ttf", "dfont", "afm",
}
-names.xml_configuration_file = "fonts.conf" -- a bit weird format, bonus feature
-names.environment_path_variable = "OSFONTDIR" -- the official way, in minimals etc
+names.fontconfigfile = "fonts.conf" -- a bit weird format, bonus feature
+names.osfontdirvariable = "OSFONTDIR" -- the official way, in minimals etc
filters.paths = { }
filters.names = { }
@@ -221,7 +223,7 @@ function names.getpaths(trace)
local hash, result = { }, { }
local function collect(t,where)
for i=1, #t do
- local v = resolvers.clean_path(t[i])
+ local v = resolvers.cleanpath(t[i])
v = gsub(v,"/+$","") -- not needed any more
local key = lower(v)
report_names("adding path from %s: %s",where,v)
@@ -230,18 +232,18 @@ function names.getpaths(trace)
end
end
end
- local path = names.environment_path_variable or ""
+ local path = names.osfontdirvariable or ""
if path ~= "" then
- collect(resolvers.expanded_path_list(path),path)
+ collect(resolvers.expandedpathlist(path),path)
end
if xml then
local confname = resolvers.getenv("FONTCONFIG_FILE") or ""
if confname == "" then
- confname = names.xml_configuration_file or ""
+ confname = names.fontconfigfile or ""
end
if confname ~= "" then
-- first look in the tex tree
- local name = resolvers.find_file(confname,"fontconfig files") or ""
+ local name = resolvers.findfile(confname,"fontconfig files") or ""
if name == "" then
-- after all, fontconfig is a unix thing
name = file.join("/etc",confname)
@@ -292,7 +294,20 @@ local function cleanname(name)
-- return (utfgsub(utfgsub(lower(str),"[^%a%A%d]",""),"%s",""))
end
-names.cleanname = cleanname
+local function cleanfilename(fullname,defaultsuffix)
+ local _, _, name, suffix = file.splitname(fullname)
+ name = gsub(lower(name),"[^%a%d]","")
+ if suffix and suffix ~= "" then
+ return name .. ".".. suffix
+ elseif defaultsuffix and defaultsuffix ~= "" then
+ return name .. ".".. defaultsuffix
+ else
+ return name
+ end
+end
+
+names.cleanname = cleanname
+names.cleanfilename = cleanfilename
local function check_names(result)
local names = result.names
@@ -310,7 +325,7 @@ local function walk_tree(pathlist,suffix,identify)
if pathlist then
for i=1,#pathlist do
local path = pathlist[i]
- path = resolvers.clean_path(path .. "/")
+ path = resolvers.cleanpath(path .. "/")
path = gsub(path,"/+","/")
local pattern = path .. "**." .. suffix -- ** forces recurse
report_names( "globbing path %s",pattern)
@@ -348,8 +363,8 @@ local function check_name(data,result,filename,suffix,subfont)
modifiers = modifiers and cleanname(modifiers)
weight = weight and cleanname(weight)
italicangle = (italicangle == 0) and nil
- -- analyse
- local a_name, a_weight, a_style, a_width, a_variant = analysespec(fullname or fontname or familyname)
+ -- analyze
+ local a_name, a_weight, a_style, a_width, a_variant = analyzespec(fullname or fontname or familyname)
-- check
local width = a_width
local variant = a_variant
@@ -400,11 +415,11 @@ local function cleanupkeywords()
for i=1,#specifications do
local s = specifications[i]
-- fix (sofar styles are taken from the name, and widths from the specification)
- local _, b_weight, b_style, b_width, b_variant = analysespec(s.weight)
- local _, c_weight, c_style, c_width, c_variant = analysespec(s.style)
- local _, d_weight, d_style, d_width, d_variant = analysespec(s.width)
- local _, e_weight, e_style, e_width, e_variant = analysespec(s.variant)
- local _, f_weight, f_style, f_width, f_variant = analysespec(s.fullname or "")
+ local _, b_weight, b_style, b_width, b_variant = analyzespec(s.weight)
+ local _, c_weight, c_style, c_width, c_variant = analyzespec(s.style)
+ local _, d_weight, d_style, d_width, d_variant = analyzespec(s.width)
+ local _, e_weight, e_style, e_width, e_variant = analyzespec(s.variant)
+ local _, f_weight, f_style, f_width, f_variant = analyzespec(s.fullname or "")
local weight = b_weight or c_weight or d_weight or e_weight or f_weight or "normal"
local style = b_style or c_style or d_style or e_style or f_style or "normal"
local width = b_width or c_width or d_width or e_width or f_width or "normal"
@@ -593,7 +608,7 @@ local function unpackreferences()
end
end
-local function analysefiles()
+local function analyzefiles()
local data = names.data
local done, totalnofread, totalnofskipped, totalnofduplicates, nofread, nofskipped, nofduplicates = { }, 0, 0, 0, 0, 0, 0
local skip_paths, skip_names = filters.paths, filters.names
@@ -616,7 +631,7 @@ local function analysefiles()
logs.push()
end
nofskipped = nofskipped + 1
- elseif not file.is_qualified_path(completename) and resolvers.find_file(completename,suffix) == "" then
+ elseif not file.is_qualified_path(completename) and resolvers.findfile(completename,suffix) == "" then
-- not locateble by backend anyway
if trace_names then
report_names("%s font %s cannot be found by backend",suffix,completename)
@@ -702,7 +717,7 @@ local function analysefiles()
report_names( "warnings are disabled (tracker 'fonts.warnings')")
end
traverse("tree", function(suffix) -- TEXTREE only
- resolvers.with_files(".*%." .. suffix .. "$", function(method,root,path,name)
+ resolvers.dowithfilesintree(".*%." .. suffix .. "$", function(method,root,path,name)
if method == "file" or method == "tree" then
local completename = root .."/" .. path .. "/" .. name
identify(completename,name,suffix,name)
@@ -718,7 +733,7 @@ local function analysefiles()
-- we do this only for a stupid names run, not used for context itself,
-- using the vars is to clumsy so we just stick to a full scan instead
traverse("lsr", function(suffix) -- all trees
- local pathlist = resolvers.split_path(resolvers.show_path("ls-R") or "")
+ local pathlist = resolvers.splitpath(resolvers.showpath("ls-R") or "")
walk_tree(pathlist,suffix,identify)
end)
else
@@ -729,6 +744,17 @@ local function analysefiles()
data.statistics.readfiles, data.statistics.skippedfiles, data.statistics.duplicatefiles = totalnofread, totalnofskipped, totalnofduplicates
end
+local function addfilenames()
+ local data = names.data
+ local specifications = data.specifications
+ local files = { }
+ for i=1,#specifications do
+ local fullname = specifications[i].filename
+ files[cleanfilename(fullname)] = fullname
+ end
+ data.files = files
+end
+
local function rejectclashes() -- just to be sure, so no explicit afm will be found then
local specifications, used, okay = names.data.specifications, { }, { }
for i=1,#specifications do
@@ -766,19 +792,20 @@ local function resetdata()
specifications = { },
families = { },
statistics = { },
- data_state = resolvers.data_state(),
+ datastate = resolvers.datastate(),
}
end
function names.identify()
resetdata()
- analysefiles()
+ analyzefiles()
rejectclashes()
collectfamilies()
collectstatistics()
cleanupkeywords()
collecthashes()
checkduplicates()
+ addfilenames()
-- sorthashes() -- will be resorted when saved
end
@@ -838,7 +865,7 @@ local function list_them(mapping,sorted,pattern,t,all)
end
function names.list(pattern,reload,all) -- here?
- names.load(reload)
+ names.load() -- todo reload
if names.loaded then
local t = { }
local data = names.data
@@ -868,8 +895,8 @@ local function is_reloaded()
if not reloaded then
local data = names.data
if names.autoreload then
- local c_status = table.serialize(resolvers.data_state())
- local f_status = table.serialize(data.data_state)
+ local c_status = table.serialize(resolvers.datastate())
+ local f_status = table.serialize(data.datastate)
if c_status == f_status then
-- report_names("font database matches configuration and file hashes")
return
@@ -974,6 +1001,17 @@ function names.resolve(askedname,sub)
end
end
+function names.getfilename(askedname,suffix) -- last resort, strip funny chars
+ names.load()
+ local files = names.data.files
+ askedname = files and files[cleanfilename(askedname,suffix)] or ""
+ if askedname == "" then
+ return ""
+ else
+ return resolvers.findbinfile(askedname,suffix) or ""
+ end
+end
+
-- specified search
local function s_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,family)
@@ -1284,7 +1322,8 @@ end
function names.specification(askedname,weight,style,width,variant,reload,all)
if askedname and askedname ~= "" and names.enabled then
- askedname = lower(askedname) -- or cleanname
+--~ askedname = lower(askedname) -- or cleanname
+ askedname = cleanname(askedname) -- or cleanname
names.load(reload)
local found = heuristic(askedname,weight,style,width,variant,all)
if not found and is_reloaded() then
@@ -1299,7 +1338,8 @@ end
function names.collect(askedname,weight,style,width,variant,reload,all)
if askedname and askedname ~= "" and names.enabled then
- askedname = lower(askedname) -- or cleanname
+--~ askedname = lower(askedname) -- or cleanname
+ askedname = cleanname(askedname) -- or cleanname
names.load(reload)
local list = heuristic(askedname,weight,style,width,variant,true)
if not list or #list == 0 and is_reloaded() then
@@ -1323,14 +1363,16 @@ end
function names.collectfiles(askedname,reload) -- no all
if askedname and askedname ~= "" and names.enabled then
- askedname = lower(askedname) -- or cleanname
+--~ askedname = lower(askedname) -- or cleanname
+ askedname = cleanname(askedname) -- or cleanname
names.load(reload)
local list = { }
local basename = file.basename
local specifications = names.data.specifications
for i=1,#specifications do
local s = specifications[i]
- if find(lower(basename(s.filename)),askedname) then
+--~ if find(lower(basename(s.filename)),askedname) then
+ if find(cleanname(basename(s.filename)),askedname) then
list[#list+1] = s
end
end
@@ -1338,65 +1380,65 @@ function names.collectfiles(askedname,reload) -- no all
end
end
---[[ldx--
-<p>Fallbacks, not permanent but a transition thing.</p>
---ldx]]--
-
-names.new_to_old = {
- ["lmroman10-capsregular"] = "lmromancaps10-oblique",
- ["lmroman10-capsoblique"] = "lmromancaps10-regular",
- ["lmroman10-demi"] = "lmromandemi10-oblique",
- ["lmroman10-demioblique"] = "lmromandemi10-regular",
- ["lmroman8-oblique"] = "lmromanslant8-regular",
- ["lmroman9-oblique"] = "lmromanslant9-regular",
- ["lmroman10-oblique"] = "lmromanslant10-regular",
- ["lmroman12-oblique"] = "lmromanslant12-regular",
- ["lmroman17-oblique"] = "lmromanslant17-regular",
- ["lmroman10-boldoblique"] = "lmromanslant10-bold",
- ["lmroman10-dunhill"] = "lmromandunh10-oblique",
- ["lmroman10-dunhilloblique"] = "lmromandunh10-regular",
- ["lmroman10-unslanted"] = "lmromanunsl10-regular",
- ["lmsans10-demicondensed"] = "lmsansdemicond10-regular",
- ["lmsans10-demicondensedoblique"] = "lmsansdemicond10-oblique",
- ["lmsansquotation8-bold"] = "lmsansquot8-bold",
- ["lmsansquotation8-boldoblique"] = "lmsansquot8-boldoblique",
- ["lmsansquotation8-oblique"] = "lmsansquot8-oblique",
- ["lmsansquotation8-regular"] = "lmsansquot8-regular",
- ["lmtypewriter8-regular"] = "lmmono8-regular",
- ["lmtypewriter9-regular"] = "lmmono9-regular",
- ["lmtypewriter10-regular"] = "lmmono10-regular",
- ["lmtypewriter12-regular"] = "lmmono12-regular",
- ["lmtypewriter10-italic"] = "lmmono10-italic",
- ["lmtypewriter10-oblique"] = "lmmonoslant10-regular",
- ["lmtypewriter10-capsoblique"] = "lmmonocaps10-oblique",
- ["lmtypewriter10-capsregular"] = "lmmonocaps10-regular",
- ["lmtypewriter10-light"] = "lmmonolt10-regular",
- ["lmtypewriter10-lightoblique"] = "lmmonolt10-oblique",
- ["lmtypewriter10-lightcondensed"] = "lmmonoltcond10-regular",
- ["lmtypewriter10-lightcondensedoblique"] = "lmmonoltcond10-oblique",
- ["lmtypewriter10-dark"] = "lmmonolt10-bold",
- ["lmtypewriter10-darkoblique"] = "lmmonolt10-boldoblique",
- ["lmtypewritervarwd10-regular"] = "lmmonoproplt10-regular",
- ["lmtypewritervarwd10-oblique"] = "lmmonoproplt10-oblique",
- ["lmtypewritervarwd10-light"] = "lmmonoprop10-regular",
- ["lmtypewritervarwd10-lightoblique"] = "lmmonoprop10-oblique",
- ["lmtypewritervarwd10-dark"] = "lmmonoproplt10-bold",
- ["lmtypewritervarwd10-darkoblique"] = "lmmonoproplt10-boldoblique",
-}
-
-names.old_to_new = table.swapped(names.new_to_old)
+--~ --[[ldx--
+--~ <p>Fallbacks, not permanent but a transition thing.</p>
+--~ --ldx]]--
+--~
+--~ names.new_to_old = allocate {
+--~ ["lmroman10-capsregular"] = "lmromancaps10-oblique",
+--~ ["lmroman10-capsoblique"] = "lmromancaps10-regular",
+--~ ["lmroman10-demi"] = "lmromandemi10-oblique",
+--~ ["lmroman10-demioblique"] = "lmromandemi10-regular",
+--~ ["lmroman8-oblique"] = "lmromanslant8-regular",
+--~ ["lmroman9-oblique"] = "lmromanslant9-regular",
+--~ ["lmroman10-oblique"] = "lmromanslant10-regular",
+--~ ["lmroman12-oblique"] = "lmromanslant12-regular",
+--~ ["lmroman17-oblique"] = "lmromanslant17-regular",
+--~ ["lmroman10-boldoblique"] = "lmromanslant10-bold",
+--~ ["lmroman10-dunhill"] = "lmromandunh10-oblique",
+--~ ["lmroman10-dunhilloblique"] = "lmromandunh10-regular",
+--~ ["lmroman10-unslanted"] = "lmromanunsl10-regular",
+--~ ["lmsans10-demicondensed"] = "lmsansdemicond10-regular",
+--~ ["lmsans10-demicondensedoblique"] = "lmsansdemicond10-oblique",
+--~ ["lmsansquotation8-bold"] = "lmsansquot8-bold",
+--~ ["lmsansquotation8-boldoblique"] = "lmsansquot8-boldoblique",
+--~ ["lmsansquotation8-oblique"] = "lmsansquot8-oblique",
+--~ ["lmsansquotation8-regular"] = "lmsansquot8-regular",
+--~ ["lmtypewriter8-regular"] = "lmmono8-regular",
+--~ ["lmtypewriter9-regular"] = "lmmono9-regular",
+--~ ["lmtypewriter10-regular"] = "lmmono10-regular",
+--~ ["lmtypewriter12-regular"] = "lmmono12-regular",
+--~ ["lmtypewriter10-italic"] = "lmmono10-italic",
+--~ ["lmtypewriter10-oblique"] = "lmmonoslant10-regular",
+--~ ["lmtypewriter10-capsoblique"] = "lmmonocaps10-oblique",
+--~ ["lmtypewriter10-capsregular"] = "lmmonocaps10-regular",
+--~ ["lmtypewriter10-light"] = "lmmonolt10-regular",
+--~ ["lmtypewriter10-lightoblique"] = "lmmonolt10-oblique",
+--~ ["lmtypewriter10-lightcondensed"] = "lmmonoltcond10-regular",
+--~ ["lmtypewriter10-lightcondensedoblique"] = "lmmonoltcond10-oblique",
+--~ ["lmtypewriter10-dark"] = "lmmonolt10-bold",
+--~ ["lmtypewriter10-darkoblique"] = "lmmonolt10-boldoblique",
+--~ ["lmtypewritervarwd10-regular"] = "lmmonoproplt10-regular",
+--~ ["lmtypewritervarwd10-oblique"] = "lmmonoproplt10-oblique",
+--~ ["lmtypewritervarwd10-light"] = "lmmonoprop10-regular",
+--~ ["lmtypewritervarwd10-lightoblique"] = "lmmonoprop10-oblique",
+--~ ["lmtypewritervarwd10-dark"] = "lmmonoproplt10-bold",
+--~ ["lmtypewritervarwd10-darkoblique"] = "lmmonoproplt10-boldoblique",
+--~ }
+--~
+--~ names.old_to_new = allocate(table.swapped(names.new_to_old))
function names.exists(name)
local found = false
local list = filters.list
for k=1,#list do
local v = list[k]
- found = (resolvers.find_file(name,v) or "") ~= ""
+ found = (resolvers.findfile(name,v) or "") ~= ""
if found then
return found
end
end
- return ((resolvers.find_file(name,"tfm") or "") ~= "") or ((names.resolve(name) or "") ~= "")
+ return ((resolvers.findfile(name,"tfm") or "") ~= "") or ((names.resolve(name) or "") ~= "")
end
-- for i=1,fonts.names.lookup(pattern) do
diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua
index d51bcc3b8..a48d3c3f4 100644
--- a/tex/context/base/font-tfm.lua
+++ b/tex/context/base/font-tfm.lua
@@ -11,6 +11,8 @@ local utf = unicode.utf8
local next, format, match, lower, gsub = next, string.format, string.match, string.lower, string.gsub
local concat, sortedkeys, utfbyte, serialize = table.concat, table.sortedkeys, utf.byte, table.serialize
+local allocate = utilities.storage.allocate
+
local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end)
@@ -28,8 +30,8 @@ local report_define = logs.new("define fonts")
local fonts = fonts
local tfm = fonts.tfm
-fonts.loaded = fonts.loaded or { }
-fonts.dontembed = fonts.dontembed or { }
+fonts.loaded = allocate()
+fonts.dontembed = allocate()
fonts.triggers = fonts.triggers or { } -- brrr
fonts.initializers = fonts.initializers or { }
fonts.initializers.common = fonts.initializers.common or { }
@@ -47,10 +49,10 @@ local glyph_code = nodecodes.glyph
supplied by <l n='luatex'/>.</p>
--ldx]]--
-tfm.resolve_vf = true -- false
-tfm.share_base_kerns = false -- true (.5 sec slower on mk but brings down mem from 410M to 310M, beware: then script/lang share too)
-tfm.mathactions = { }
-tfm.fontname_mode = "fullpath"
+tfm.resolvevirtualtoo = true -- false
+tfm.sharebasekerns = false -- true (.5 sec slower on mk but brings down mem from 410M to 310M, beware: then script/lang share too)
+tfm.mathactions = { }
+tfm.fontnamemode = "fullpath"
tfm.enhance = tfm.enhance or function() end
@@ -65,7 +67,7 @@ function tfm.read_from_tfm(specification)
tfmdata = font.read_tfm(fname,specification.size) -- not cached, fast enough
if tfmdata then
tfmdata.descriptions = tfmdata.descriptions or { }
- if tfm.resolve_vf then
+ if tfm.resolvevirtualtoo then
fonts.logger.save(tfmdata,file.extname(fname),specification) -- strange, why here
fname = resolvers.findbinfile(specification.name, 'ovf')
if fname and fname ~= "" then
@@ -126,7 +128,7 @@ end
to scale virtual characters.</p>
--ldx]]--
-function tfm.get_virtual_id(tfmdata)
+function tfm.getvirtualid(tfmdata)
-- since we don't know the id yet, we use 0 as signal
if not tfmdata.fonts then
tfmdata.type = "virtual"
@@ -138,7 +140,7 @@ function tfm.get_virtual_id(tfmdata)
end
end
-function tfm.check_virtual_id(tfmdata, id)
+function tfm.checkvirtualid(tfmdata, id)
if tfmdata and tfmdata.type == "virtual" then
if not tfmdata.fonts or #tfmdata.fonts == 0 then
tfmdata.type, tfmdata.fonts = "real", nil
@@ -168,7 +170,7 @@ fonts.trace_scaling = false
-- sharedkerns are unscaled and are be hashed by concatenated indexes
--~ function tfm.check_base_kerns(tfmdata)
---~ if tfm.share_base_kerns then
+--~ if tfm.sharebasekerns then
--~ local sharedkerns = tfmdata.sharedkerns
--~ if sharedkerns then
--~ local basekerns = { }
@@ -180,7 +182,7 @@ fonts.trace_scaling = false
--~ end
--~ function tfm.prepare_base_kerns(tfmdata)
---~ if tfm.share_base_kerns and not tfmdata.sharedkerns then
+--~ if tfm.sharebasekerns and not tfmdata.sharedkerns then
--~ local sharedkerns = { }
--~ tfmdata.sharedkerns = sharedkerns
--~ for u, chr in next, tfmdata.characters do
@@ -209,7 +211,43 @@ local charactercache = { }
-- a virtual font has italic correction make sure to set the
-- has_italic flag. Some more flags will be added in the future.
-function tfm.calculate_scale(tfmtable, scaledpoints)
+--[[ldx--
+<p>The reason why the scaler was originally split, is that for a while we experimented
+with a helper function. However, in practice the <l n='api'/> calls are too slow to
+make this profitable and the <l n='lua'/> based variant was just faster. A days
+wasted day but an experience richer.</p>
+--ldx]]--
+
+tfm.autocleanup = true
+
+local lastfont = nil
+
+-- we can get rid of the tfm instance when we have fast access to the
+-- scaled character dimensions at the tex end, e.g. a fontobject.width
+--
+-- flushing the kern and ligature tables from memory saves a lot (only
+-- base mode) but it complicates vf building where the new characters
+-- demand this data .. solution: functions that access them
+
+function tfm.cleanuptable(tfmdata) -- we need a cleanup callback, now we miss the last one
+ if tfm.autocleanup then -- ok, we can hook this into everyshipout or so ... todo
+ if tfmdata.type == 'virtual' or tfmdata.virtualized then
+ for k, v in next, tfmdata.characters do
+ if v.commands then v.commands = nil end
+ -- if v.kerns then v.kerns = nil end
+ end
+ else
+ -- for k, v in next, tfmdata.characters do
+ -- if v.kerns then v.kerns = nil end
+ -- end
+ end
+ end
+end
+
+function tfm.cleanup(tfmdata) -- we need a cleanup callback, now we miss the last one
+end
+
+function tfm.calculatescale(tfmtable, scaledpoints)
if scaledpoints < 0 then
scaledpoints = (- scaledpoints/1000) * tfmtable.designsize -- already in sp
end
@@ -218,10 +256,10 @@ function tfm.calculate_scale(tfmtable, scaledpoints)
return scaledpoints, delta, units
end
-function tfm.do_scale(tfmtable, scaledpoints, relativeid)
+function tfm.scale(tfmtable, scaledpoints, relativeid)
-- tfm.prepare_base_kerns(tfmtable) -- optimalization
local t = { } -- the new table
- local scaledpoints, delta, units = tfm.calculate_scale(tfmtable, scaledpoints, relativeid)
+ local scaledpoints, delta, units = tfm.calculatescale(tfmtable, scaledpoints, relativeid)
t.units_per_em = units or 1000
local hdelta, vdelta = delta, delta
-- unicoded unique descriptions shared cidinfo characters changed parameters indices
@@ -303,7 +341,7 @@ function tfm.do_scale(tfmtable, scaledpoints, relativeid)
local scaledheight = defaultheight * vdelta
local scaleddepth = defaultdepth * vdelta
local stackmath = tfmtable.ignore_stack_math ~= true
- local private = fonts.private
+ local private = fonts.privateoffset
local sharedkerns = { }
for k,v in next, characters do
local chr, description, index
@@ -588,55 +626,14 @@ function tfm.do_scale(tfmtable, scaledpoints, relativeid)
report_define("used for accesing subfont: '%s'",t.psname or "nopsname")
report_define("used for subsetting: '%s'",t.fontname or "nofontname")
end
---~ print(t.fontname,table.serialize(t.MathConstants))
- return t, delta
-end
-
---[[ldx--
-<p>The reason why the scaler is split, is that for a while we experimented
-with a helper function. However, in practice the <l n='api'/> calls are too slow to
-make this profitable and the <l n='lua'/> based variant was just faster. A days
-wasted day but an experience richer.</p>
---ldx]]--
-
-tfm.auto_cleanup = true
-
-local lastfont = nil
-
--- we can get rid of the tfm instance when we have fast access to the
--- scaled character dimensions at the tex end, e.g. a fontobject.width
---
--- flushing the kern and ligature tables from memory saves a lot (only
--- base mode) but it complicates vf building where the new characters
--- demand this data .. solution: functions that access them
-
-function tfm.cleanup_table(tfmdata) -- we need a cleanup callback, now we miss the last one
- if tfm.auto_cleanup then -- ok, we can hook this into everyshipout or so ... todo
- if tfmdata.type == 'virtual' or tfmdata.virtualized then
- for k, v in next, tfmdata.characters do
- if v.commands then v.commands = nil end
- -- if v.kerns then v.kerns = nil end
- end
- else
- -- for k, v in next, tfmdata.characters do
- -- if v.kerns then v.kerns = nil end
- -- end
- end
- end
-end
-
-function tfm.cleanup(tfmdata) -- we need a cleanup callback, now we miss the last one
-end
-
-function tfm.scale(tfmtable, scaledpoints, relativeid)
- local t, factor = tfm.do_scale(tfmtable, scaledpoints, relativeid)
- t.factor = factor
- t.ascender = factor*(tfmtable.ascender or 0)
- t.descender = factor*(tfmtable.descender or 0)
+ -- this will move up (side effect of merging split call)
+ t.factor = delta
+ t.ascender = delta*(tfmtable.ascender or 0)
+ t.descender = delta*(tfmtable.descender or 0)
t.shared = tfmtable.shared or { }
t.unique = table.fastcopy(tfmtable.unique or {})
---~ print("scaling", t.name, t.factor) -- , tfm.hash_features(tfmtable.specification))
tfm.cleanup(t)
+ -- print(t.fontname,table.serialize(t.MathConstants))
return t
end
@@ -645,10 +642,12 @@ end
process features right.</p>
--ldx]]--
-fonts.analyzers = fonts.analyzers or { }
-fonts.analyzers.aux = fonts.analyzers.aux or { }
-fonts.analyzers.methods = fonts.analyzers.methods or { }
-fonts.analyzers.initializers = fonts.analyzers.initializers or { }
+fonts.analyzers = fonts.analyzers or { }
+local analyzers = fonts.analyzers
+
+analyzers.aux = analyzers.aux or { }
+analyzers.methods = analyzers.methods or { }
+analyzers.initializers = analyzers.initializers or { }
-- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze
@@ -657,7 +656,7 @@ fonts.analyzers.initializers = fonts.analyzers.initializers or { }
local state = attributes.private('state')
-function fonts.analyzers.aux.setstate(head,font)
+function analyzers.aux.setstate(head,font)
local tfmdata = fontdata[font]
local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
@@ -718,7 +717,7 @@ end
-- checking
-function tfm.checked_filename(metadata,whatever)
+function tfm.checkedfilename(metadata,whatever)
local foundfilename = metadata.foundfilename
if not foundfilename then
local askedfilename = metadata.filename or ""
diff --git a/tex/context/base/font-vf.lua b/tex/context/base/font-vf.lua
index d803636a2..ccbe9a3e6 100644
--- a/tex/context/base/font-vf.lua
+++ b/tex/context/base/font-vf.lua
@@ -11,33 +11,38 @@ if not modules then modules = { } end modules ['font-vf'] = {
changes. This will change.</p>
--ldx]]--
--- define.methods elsewhere !
-
local next = next
local fastcopy = table.fastcopy
+local allocate = utilities.storage.allocate
+
local fonts = fonts
local vf = fonts.vf
local tfm = fonts.tfm
-fonts.define = fonts.define or { }
-local define = fonts.define
-define.methods = define.methods or { }
+fonts.definers = fonts.definers or { }
+local definers = fonts.definers
+
+definers.methods = definers.methods or { }
+local methods = definers.methods
+
+methods.variants = allocate()
+local variants = methods.variants
vf.combinations = vf.combinations or { }
vf.aux = vf.aux or { }
vf.aux.combine = vf.aux.combine or { }
local combine = vf.aux.combine
-function define.methods.install(tag, rules)
+function methods.install(tag, rules)
vf.combinations[tag] = rules
- define.methods[tag] = function(specification)
+ variants[tag] = function(specification)
return vf.combine(specification,tag)
end
end
local function combine_load(g,name)
- return tfm.read_and_define(name or g.specification.name,g.specification.size)
+ return tfm.readanddefine(name or g.specification.name,g.specification.size)
end
local function combine_assign(g, name, from, to, start, force)
@@ -78,7 +83,7 @@ local function combine_process(g,list)
end
local function combine_names(g,name,force)
- local f, id = tfm.read_and_define(name,g.specification.size)
+ local f, id = tfm.readanddefine(name,g.specification.size)
if f and id then
local fc, gc = f.characters, g.characters
local fd, gd = f.descriptions, g.descriptions
@@ -123,7 +128,7 @@ end
--~ combine.names = combine_names
--~ combine.feature = combine_feature
-combine.commands = {
+combine.commands = allocate {
["initialize"] = function(g,v) combine_assign (g,g.name) end,
["include-method"] = function(g,v) combine_process (g,vf.combinations[v[2]]) end, -- name
-- ["copy-parameters"] = function(g,v) combine_parameters(g,v[2]) end, -- name
@@ -152,7 +157,7 @@ end
-- simple example with features
-define.methods.install(
+methods.install(
"ligatures", {
{ "feature", "liga" } ,
{ "feature", "dlig" } ,
@@ -160,7 +165,7 @@ define.methods.install(
}
)
---~ define.methods.install (
+--~ methods.install (
--~ "ligatures-x", {
--~ { "feature", "liga" } ,
--~ { "feature", "dlig" } ,
@@ -169,7 +174,7 @@ define.methods.install(
--~ }
--~ )
---~ define.methods.install(
+--~ methods.install(
--~ "lmsymbol10", {
--~ { "fallback_names", "lmsy10.afm" } ,
--~ { "fallback_names", "msam10.afm" } ,
@@ -180,7 +185,7 @@ define.methods.install(
-- docu case
---~ define.methods.install(
+--~ methods.install(
--~ "weird", {
--~ { "copy-range", "lmroman10-regular" } ,
--~ { "copy-char", "lmroman10-regular", 65, 66 } ,
@@ -194,10 +199,10 @@ define.methods.install(
-- todo: interface tables in back-ini
-define.methods["demo-1"] = function(specification)
+variants["demo-1"] = function(specification)
local name = specification.name -- symbolic name
local size = specification.size -- given size
- local f, id = tfm.read_and_define('lmroman10-regular',size)
+ local f, id = tfm.readanddefine('lmroman10-regular',size)
if f and id then
local capscale, digscale = 0.85, 0.75
-- f.name, f.type = name, 'virtual'
diff --git a/tex/context/base/font-xtx.lua b/tex/context/base/font-xtx.lua
index aa0c3bca3..5a31d8c5e 100644
--- a/tex/context/base/font-xtx.lua
+++ b/tex/context/base/font-xtx.lua
@@ -32,15 +32,15 @@ of the specifier.</p>
--ldx]]--
local fonts = fonts
-local define = fonts.define
-local specify = define.specify
+local definers = fonts.definers
+local specifiers = definers.specifiers
local normalize_meanings = fonts.otf.meanings.normalize
local list = { }
-specify.colonized_default_lookup = "file"
+specifiers.colonizedpreference = "file"
-local function issome () list.lookup = specify.colonized_default_lookup end
+local function issome () list.lookup = specifiers.colonizedpreference end
local function isfile () list.lookup = 'file' end
local function isname () list.lookup = 'name' end
local function thename(s) list.name = s end
@@ -97,6 +97,4 @@ local function colonized(specification) -- xetex mode
return specification
end
-specify.colonized = colonized
-
-define.register_split(":",colonized)
+definers.registersplit(":",colonized)
diff --git a/tex/context/base/grph-fig.mkiv b/tex/context/base/grph-fig.mkiv
index 634a8733a..5064e5c43 100644
--- a/tex/context/base/grph-fig.mkiv
+++ b/tex/context/base/grph-fig.mkiv
@@ -41,14 +41,14 @@
\def\dodoplaceexternalfigure[#1][#2][#3][#4][#5]%
{\bgroup
-\dostarttagged\t!image\empty
+ \dostarttagged\t!image\empty
\pushmacro\textunderscore
\edef\textunderscore{\string_}% brrr, temp hack, still needed?
\calculateexternalfigure[][#1][#2][#3][#4][#5]% [] is dummy dwcomp
\popmacro\textunderscore
-\global\setbox\foundexternalfigure\naturalvbox attr \imageattribute 2 {\box\foundexternalfigure}%
+ \global\setbox\foundexternalfigure\naturalvbox attr \imageattribute 2 {\box\foundexternalfigure}%
\box\foundexternalfigure
-\dostoptagged
+ \dostoptagged
\egroup}
\def\externalfigurereplacement#1#2#3%
@@ -163,7 +163,7 @@
\def\dodouseexternalfigure#1#2#3#4%
{\setvalue{\??ef\??ef#1}{\doplaceexternalfigure[#1][#2][#3][#4]}%
- \doanalyseexternalfigurecollection[#2][#4]}
+ \doanalyzeexternalfigurecollection[#2][#4]}
\newconditional\inexternalfigurecollection
\newdimen\xexternalfigurecollectionminwidth
@@ -171,7 +171,7 @@
\newdimen\xexternalfigurecollectionminheight
\newdimen\xexternalfigurecollectionmaxheight
-\def\doanalyseexternalfigurecollection[#1][#2]%
+\def\doanalyzeexternalfigurecollection[#1][#2]%
{\ifconditional\inexternalfigurecollection
\setfalse\inexternalfigurecollection
\getfiguredimensions[#1][#2]%
@@ -572,7 +572,7 @@
\fi\fi\fi}
\def\dodotypesetfile#1#2#3#4% options settings-a filename settings-b
- {\edef\typesetfilename{\ctxlua{tex.write(job.files.run("#3","#1"))}}%
+ {\edef\typesetfilename{\ctxlua{tex.write(job.files.context("#3","#1"))}}%
\expanded{\externalfigure[\typesetfilename]}[#2,#4]}
\setupexternalfigures
diff --git a/tex/context/base/grph-fil.lua b/tex/context/base/grph-fil.lua
index 4832ac3bc..4bd1e7d0f 100644
--- a/tex/context/base/grph-fil.lua
+++ b/tex/context/base/grph-fil.lua
@@ -10,36 +10,53 @@ local format, concat = string.format, table.concat
local trace_run = false trackers.register("files.run",function(v) trace_run = v end)
-local command = "context %s"
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+
+local collected, tobesaved = allocate(), allocate()
local jobfiles = {
- collected = { },
- tobesaved = { },
+ collected = collected,
+ tobesaved = tobesaved,
}
job.files = jobfiles
-local tobesaved, collected = jobfiles.tobesaved, jobfiles.collected
-
local function initializer()
- tobesaved, collected = jobfiles.tobesaved, jobfiles.collected
+ tobesaved = mark(jobfiles.tobesaved)
+ collected = mark(jobfiles.collected)
end
-job.register('job.files.collected', jobfiles.tobesaved, initializer)
+job.register('job.files.collected', tobesaved, initializer)
jobfiles.forcerun = false
-function jobfiles.run(name,...)
+function jobfiles.run(name,command)
local oldchecksum = collected[name]
local newchecksum = file.checksum(name)
if jobfiles.forcerun or not oldchecksum or oldchecksum ~= newchecksum then
if trace_run then
- commands.writestatus("buffers","changes in '%s', processing forced",name)
+ commands.writestatus("processing","changes in '%s', processing forced",name)
+ end
+ if command and command ~= "" then
+ os.execute(command)
+ else
+ commands.writestatus("processing","no command given for processing '%s'",name)
end
- os.execute(format(command,concat({ name, ... }," ")))
elseif trace_run then
- commands.writestatus("buffers","no changes in '%s', not processed",name)
+ commands.writestatus("processing","no changes in '%s', not processed",name)
end
tobesaved[name] = newchecksum
- return file.replacesuffix(name,"pdf")
+end
+
+function jobfiles.context(name,options)
+ if type(name) == "table" then
+ local result = { }
+ for i=1,#name do
+ result[#result+1] = jobfiles.context(name[i],options)
+ end
+ return result
+ else
+ jobfiles.run(name,"context ".. (options or "") .. " " .. name)
+ return file.replacesuffix(name,"pdf")
+ end
end
diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua
index b1eccca08..f1847cb40 100644
--- a/tex/context/base/grph-inc.lua
+++ b/tex/context/base/grph-inc.lua
@@ -45,6 +45,7 @@ local contains = table.contains
local concat = table.concat
local todimen = string.todimen
local settings_to_array = utilities.parsers.settings_to_array
+local allocate = utilities.storage.allocate
local ctxcatcodes = tex.ctxcatcodes
local variables = interfaces.variables
@@ -96,16 +97,22 @@ function img.checksize(size)
end
end
+local indexed = { }
+
+function img.ofindex(n)
+ return indexed[n]
+end
+
--- we can consider an grph-ini file
-figures = figures or { }
+figures = figures or { }
local figures = figures
-figures.loaded = figures.loaded or { }
-figures.used = figures.used or { }
-figures.found = figures.found or { }
-figures.suffixes = figures.suffixes or { }
-figures.patterns = figures.patterns or { }
+figures.loaded = allocate()
+figures.used = allocate()
+figures.found = allocate()
+figures.suffixes = allocate()
+figures.patterns = allocate()
figures.boxnumber = figures.boxnumber or 0
figures.defaultsearch = true
@@ -113,24 +120,24 @@ figures.defaultwidth = 0
figures.defaultheight = 0
figures.defaultdepth = 0
figures.nofprocessed = 0
-figures.prefer_quality = true -- quality over location
+figures.preferquality = true -- quality over location
-figures.localpaths = {
+figures.localpaths = allocate {
".", "..", "../.."
}
-figures.cachepaths = {
+figures.cachepaths = allocate {
prefix = "",
path = ".",
subpath = ".",
}
-figures.paths = table.copy(figures.localpaths)
+figures.paths = allocate(table.copy(figures.localpaths))
-figures.order = {
+figures.order = allocate{
"pdf", "mps", "jpg", "png", "jbig", "svg", "eps", "gif", "mov", "buffer", "tex",
}
-figures.formats = {
+figures.formats = allocate{
["pdf"] = { list = { "pdf" } },
["mps"] = { patterns = { "mps", "%d+" } },
["jpg"] = { list = { "jpg", "jpeg" } },
@@ -145,10 +152,10 @@ figures.formats = {
}
function figures.setlookups()
- figures.suffixes, figures.patterns = { }, { }
+ local fs, fp = allocate(), allocate()
+ figures.suffixes, figures.patterns = fs, fp
for _, format in next, figures.order do
local data = figures.formats[format]
- local fs, fp = figures.suffixes, figures.patterns
local list = data.list
if list then
for i=1,#list do
@@ -441,7 +448,7 @@ end
local resolve_too = true -- urls
local function locate(request) -- name, format, cache
- local askedname = resolvers.clean_path(request.name)
+ local askedname = resolvers.cleanpath(request.name)
local foundname = figures.found[askedname .. "->" .. (request.conversion or "default")]
if foundname then
return foundname
@@ -526,7 +533,7 @@ local function locate(request) -- name, format, cache
end
end
if figures.defaultsearch then
- local check = resolvers.find_file(askedname)
+ local check = resolvers.findfile(askedname)
if check and check ~= "" then
return register(askedname, {
askedname = askedname,
@@ -561,7 +568,7 @@ local function locate(request) -- name, format, cache
end
end
else
- if figures.prefer_quality then
+ if figures.preferquality then
if trace_figures then
commands.writestatus("figures","strategy: unknown format, prefer quality")
end
@@ -631,7 +638,7 @@ local function locate(request) -- name, format, cache
local list = figures.formats[format].list or { format }
for k=1,#list do
local suffix = list[k]
- local check = resolvers.find_file(file.replacesuffix(askedname,suffix))
+ local check = resolvers.findfile(file.replacesuffix(askedname,suffix))
if check and check ~= "" then
return register(askedname, {
askedname = askedname,
@@ -650,22 +657,22 @@ end
-- -- -- plugins -- -- --
-figures.existers = figures.existers or { }
+figures.existers = allocate()
local existers = figures.existers
-figures.checkers = figures.checkers or { }
+figures.checkers = allocate()
local checkers = figures.checkers
-figures.includers = figures.includers or { }
+figures.includers = allocate()
local includers = figures.includers
-figures.converters = figures.converters or { }
+figures.converters = allocate()
local converters = figures.converters
-figures.identifiers = figures.identifiers or { }
+figures.identifiers = allocate()
local identifiers = figures.identifiers
-figures.programs = figures.programs or { }
+figures.programs = allocate()
local programs = figures.programs
function identifiers.default(data)
@@ -813,6 +820,7 @@ function includers.generic(data)
local nr = figures.boxnumber
-- it looks like we have a leak in attributes here .. todo
local box = node.hpack(img.node(figure)) -- img.node(figure) not longer valid
+ indexed[figure.index] = figure
box.width, box.height, box.depth = figure.width, figure.height, 0 -- new, hm, tricky, we need to do that in tex (yet)
texbox[nr] = box
ds.objectnumber = figure.objnum
@@ -918,7 +926,7 @@ includers.buffers = includers.nongeneric
-- -- -- tex -- -- --
function existers.tex(askedname)
- askedname = resolvers.find_file(askedname)
+ askedname = resolvers.findfile(askedname)
return (askedname ~= "" and askedname) or false
end
function checkers.tex(data)
diff --git a/tex/context/base/grph-swf.lua b/tex/context/base/grph-swf.lua
index dc36afa0e..deff3defa 100644
--- a/tex/context/base/grph-swf.lua
+++ b/tex/context/base/grph-swf.lua
@@ -11,7 +11,6 @@ local format = string.format
local texsprint = tex.sprint
local ctxcatcodes = tex.ctxcatcodes
local nodeinjections = backends.nodeinjections
-local pdfannotation = nodes.pool.pdfannotation
local figures = figures
diff --git a/tex/context/base/grph-u3d.lua b/tex/context/base/grph-u3d.lua
index 75bfda287..e20655e3f 100644
--- a/tex/context/base/grph-u3d.lua
+++ b/tex/context/base/grph-u3d.lua
@@ -15,7 +15,6 @@ local report_graphics = logs.new("graphics")
local figures = figures
local nodeinjections = backends.nodeinjections
-local pdfannotation = nodepool.pdfannotation
local todimen = string.todimen
-- maybe todo: backends.codeinjections.insertu3d
diff --git a/tex/context/base/java-ini.lua b/tex/context/base/java-ini.lua
index 0c7cdcfa4..bdd3488f5 100644
--- a/tex/context/base/java-ini.lua
+++ b/tex/context/base/java-ini.lua
@@ -10,13 +10,16 @@ local format = string.format
local concat = table.concat
local lpegmatch, lpegP, lpegR, lpegS, lpegC = lpeg.match, lpeg.P, lpeg.R, lpeg.S, lpeg.C
+local allocate = utilities.storage.allocate
+
-- todo: don't flush scripts if no JS key
interactions.javascripts = interactions.javascripts or { }
local javascripts = interactions.javascripts
-javascripts.codes = javascripts.codes or { }
-javascripts.preambles = javascripts.preambles or { }
-javascripts.functions = javascripts.functions or { }
+
+javascripts.codes = allocate()
+javascripts.preambles = allocate()
+javascripts.functions = allocate()
local codes, preambles, functions = javascripts.codes, javascripts.preambles, javascripts.functions
@@ -49,7 +52,7 @@ local parsefunctions = (fname + any)^0
function javascripts.storecode(str)
local name, uses, script = lpegmatch(parsecode,str)
if name and name ~= "" then
- javascripts.codes[name] = { uses, script }
+ codes[name] = { uses, script }
end
end
diff --git a/tex/context/base/l-file.lua b/tex/context/base/l-file.lua
index edc1dc9e4..edf3f8a06 100644
--- a/tex/context/base/l-file.lua
+++ b/tex/context/base/l-file.lua
@@ -161,26 +161,14 @@ file.iswritable = file.is_writable -- depricated
-- todo: lpeg
---~ function file.split_path(str)
---~ local t = { }
---~ str = gsub(str,"\\", "/")
---~ str = gsub(str,"(%a):([;/])", "%1\001%2")
---~ for name in gmatch(str,"([^;:]+)") do
---~ if name ~= "" then
---~ t[#t+1] = gsub(name,"\001",":")
---~ end
---~ end
---~ return t
---~ end
-
local checkedsplit = string.checkedsplit
-function file.split_path(str,separator)
+function file.splitpath(str,separator) -- string
str = gsub(str,"\\","/")
return checkedsplit(str,separator or io.pathseparator)
end
-function file.join_path(tab)
+function file.joinpath(tab) -- table
return concat(tab,io.pathseparator) -- can have trailing //
end
diff --git a/tex/context/base/l-io.lua b/tex/context/base/l-io.lua
index cf8852fd6..fe988b107 100644
--- a/tex/context/base/l-io.lua
+++ b/tex/context/base/l-io.lua
@@ -186,7 +186,7 @@ function io.ask(question,default,options)
end
end
-function io.readnumber(f,n,m)
+local function readnumber(f,n,m)
if m then
f:seek("set",n)
n = m
@@ -198,18 +198,20 @@ function io.readnumber(f,n,m)
return 256*a + b
elseif n == 4 then
local a, b, c, d = byte(f:read(4),1,4)
- return 256^3 * a + 256^2 * b + 256*c + d
+ return 256*256*256 * a + 256*256 * b + 256*c + d
elseif n == 8 then
local a, b = readnumber(f,4), readnumber(f,4)
- return 256 * b + c
+ return 256 * a + b
elseif n == 12 then
local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4)
- return 256^2 * a + 256 * b + c
+ return 256*256 * a + 256 * b + c
else
return 0
end
end
+io.readnumber = readnumber
+
function io.readstring(f,n,m)
if m then
f:seek("set",n)
diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua
index cffcc86e1..0a4ee0ba3 100644
--- a/tex/context/base/l-lpeg.lua
+++ b/tex/context/base/l-lpeg.lua
@@ -29,7 +29,9 @@ patterns.sign = sign
patterns.cardinal = sign^0 * digit^1
patterns.integer = sign^0 * digit^1
patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
patterns.oct = P("0") * R("07")^1
patterns.octal = patterns.oct
patterns.HEX = P("0x") * R("09","AF")^1
@@ -248,6 +250,10 @@ function lpeg.secondofsplit(separator) -- nil if not split
return splitter
end
+function lpeg.balancer(left,right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
--~ print(1,match(lpeg.firstofsplit(":"),"bc:de"))
--~ print(2,match(lpeg.firstofsplit(":"),":de")) -- empty
--~ print(3,match(lpeg.firstofsplit(":"),"bc"))
diff --git a/tex/context/base/lang-ini.lua b/tex/context/base/lang-ini.lua
index afb933276..d66b2645b 100644
--- a/tex/context/base/lang-ini.lua
+++ b/tex/context/base/lang-ini.lua
@@ -97,7 +97,7 @@ local function loaddefinitions(tag,specification)
elseif not dataused[definition] then
dataused[definition] = definition
local filename = "lang-" .. definition .. ".lua"
- local fullname = resolvers.find_file(filename) or ""
+ local fullname = resolvers.findfile(filename) or ""
if fullname ~= "" then
if trace_patterns then
report_languages("loading definition '%s' for language '%s' from '%s'",definition,tag,fullname)
@@ -178,7 +178,7 @@ end
function languages.loadable(tag,defaultlanguage) -- hack
local l = registered[tag] -- no synonyms
- if l and resolvers.find_file("lang-"..l.patterns..".lua") then
+ if l and resolvers.findfile("lang-"..l.patterns..".lua") then
return true
else
return false
@@ -343,20 +343,20 @@ end)
--~ local parser = (1-command)^0 * command * content
--~
--~ local function filterpatterns(filename)
---~ return lpegmatch(parser,io.loaddata(resolvers.find_file(filename)) or "")
+--~ return lpegmatch(parser,io.loaddata(resolvers.findfile(filename)) or "")
--~ end
--~
--~ local command = lpeg.P("\\hyphenation")
--~ local parser = (1-command)^0 * command * content
--~
--~ local function filterexceptions(filename)
---~ return lpegmatch(parser,io.loaddata(resolvers.find_file(filename)) or "") -- "" ?
+--~ return lpegmatch(parser,io.loaddata(resolvers.findfile(filename)) or "") -- "" ?
--~ end
--~
--~ local function loadthem(tag, filename, filter, target)
--~ statistics.starttiming(languages)
--~ local data, instance = resolve(tag)
---~ local fullname = (filename and filename ~= "" and resolvers.find_file(filename)) or ""
+--~ local fullname = (filename and filename ~= "" and resolvers.findfile(filename)) or ""
--~ local ok = fullname ~= ""
--~ if ok then
--~ if trace_patterns then
diff --git a/tex/context/base/lang-ini.mkiv b/tex/context/base/lang-ini.mkiv
index 37f3fdb10..0a0cbf016 100644
--- a/tex/context/base/lang-ini.mkiv
+++ b/tex/context/base/lang-ini.mkiv
@@ -392,6 +392,8 @@
\definecomplexorsimple\language
+\newcount\mainlanguagenumber
+
\def\mainlanguage[#1]%
{\edef\askedlanguage{#1}%
\ifx\askedlanguage\empty \else
@@ -408,7 +410,8 @@
\docomplexlanguage
\fi
\fi
- \fi}
+ \fi
+ \mainlanguagenumber\normallanguage}
%D New (see nomarking and nolist):
diff --git a/tex/context/base/lang-url.lua b/tex/context/base/lang-url.lua
index d8723db78..2453739db 100644
--- a/tex/context/base/lang-url.lua
+++ b/tex/context/base/lang-url.lua
@@ -26,7 +26,7 @@ dealing with <l n='ascii'/> characters.</p>
commands.hyphenatedurl = commands.hyphenatedurl or { }
local hyphenatedurl = commands.hyphenatedurl
-hyphenatedurl.characters = {
+hyphenatedurl.characters = utilities.storage.allocate {
["!"] = 1,
["\""] = 1,
["#"] = 1,
diff --git a/tex/context/base/lang-wrd.lua b/tex/context/base/lang-wrd.lua
index 5e6cf7934..9efde5a05 100644
--- a/tex/context/base/lang-wrd.lua
+++ b/tex/context/base/lang-wrd.lua
@@ -32,9 +32,9 @@ local tasks = nodes.tasks
local nodecodes = nodes.nodecodes
local kerncodes = nodes.kerncodes
-local glyph_node = nodecodes.glyph
-local disc_node = nodecodes.disc
-local kern_node = nodecodes.kern
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local kern_code = nodecodes.kern
local kerning_code = kerncodes.kerning
@@ -53,7 +53,7 @@ local word = Cs((markup/"" + disc/"" + (1-spacing))^1)
local loaded = { } -- we share lists
function words.load(tag,filename)
- local fullname = resolvers.find_file(filename,'other text file') or ""
+ local fullname = resolvers.findfile(filename,'other text file') or ""
if fullname ~= "" then
statistics.starttiming(languages)
local list = loaded[fullname]
@@ -99,7 +99,7 @@ local function mark_words(head,whenfound) -- can be optimized
end
while current do
local id = current.id
- if id == glyph_node then
+ if id == glyph_code then
local a = current.lang
if a then
if a ~= language then
@@ -130,11 +130,11 @@ local function mark_words(head,whenfound) -- can be optimized
action()
end
end
- elseif id == disc_node then
+ elseif id == disc_code then
if n > 0 then
n = n + 1
end
- elseif id == kern_node and current.subtype == kerning_code and start then
+ elseif id == kern_code and current.subtype == kerning_code and start then
-- ok
elseif start then
action()
@@ -174,7 +174,7 @@ methods[1] = function(head, attribute, yes, nop)
return head, done
end
-local list, dump = { }, false -- todo: per language
+local list = { } -- todo: per language
local lowerchar = characters.lower
@@ -189,14 +189,16 @@ methods[2] = function(head, attribute)
return head, true
end
-words.used = list
+-- words.used = list
-function words.dump_used_words(name)
- if dump then
+directives.register("languages.words.dump", function(v)
+ local name = type(v) == "string" and v ~= "" and v or file.addsuffix(tex.jobname,"words")
+ local function dumpusedwords(name)
report_languages("saving list of used words in '%s'",name)
io.savedata(name,table.serialize(list))
end
-end
+ luatex.registerstopactions(dumpusedwords)
+end )
local color = attributes.private('color')
diff --git a/tex/context/base/lang-wrd.mkiv b/tex/context/base/lang-wrd.mkiv
index b6fc6a9d3..a706c21a7 100644
--- a/tex/context/base/lang-wrd.mkiv
+++ b/tex/context/base/lang-wrd.mkiv
@@ -47,8 +47,4 @@
[\c!state=\v!stop,
\c!method=1]
-\appendtoks
- \ctxlua{languages.words.dump_used_words("\jobname.words")}%
-\to \everybye
-
\protect \endinput
diff --git a/tex/context/base/lpdf-ano.lua b/tex/context/base/lpdf-ano.lua
index a57372ea1..ef1f31ab6 100644
--- a/tex/context/base/lpdf-ano.lua
+++ b/tex/context/base/lpdf-ano.lua
@@ -6,7 +6,8 @@ if not modules then modules = { } end modules ['lpdf-ano'] = {
license = "see context related readme files"
}
-local tostring, format, rep = tostring, string.rep, string.format
+local next, tostring = next, tostring
+local rep, format = string.rep, string.format
local texcount = tex.count
local backends, lpdf = backends, lpdf
@@ -24,24 +25,20 @@ local constants = interfaces.constants
local settings_to_array = utilities.parsers.settings_to_array
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
-local javascriptcode = interactions.javascripts.code
+local javascriptcode = interactions.javascripts.code
-local references = structures.references
-local bookmarks = structures.bookmarks
+local references = structures.references
+local bookmarks = structures.bookmarks
+local references = structures.references
-references.runners = references.runners or { }
-references.specials = references.specials or { }
-references.handlers = references.handlers or { }
-references.executers = references.executers or { }
-
-local runners = references.runners
-local specials = references.specials
-local handlers = references.handlers
-local executers = references.executers
+local runners = references.runners
+local specials = references.specials
+local handlers = references.handlers
+local executers = references.executers
local pdfdictionary = lpdf.dictionary
local pdfarray = lpdf.array
@@ -49,14 +46,20 @@ local pdfreference = lpdf.reference
local pdfunicode = lpdf.unicode
local pdfconstant = lpdf.constant
local pdfflushobject = lpdf.flushobject
+local pdfshareobjectref = lpdf.shareobjectreference
+local pdfimmediateobject = lpdf.immediateobject
local pdfreserveobject = lpdf.reserveobject
local pdfpagereference = lpdf.pagereference
+local pdfregisterannot = pdf.registerannot
+
local nodepool = nodes.pool
local pdfannotation_node = nodepool.pdfannotation
local pdfdestination_node = nodepool.pdfdestination
+local latelua_node = nodepool.latelua
+local pdf_annot = pdfconstant("Annot")
local pdf_uri = pdfconstant("URI")
local pdf_gotor = pdfconstant("GoToR")
local pdf_goto = pdfconstant("GoTo")
@@ -67,6 +70,8 @@ local pdf_n = pdfconstant("N")
local pdf_t = pdfconstant("T")
local pdf_border = pdfarray { 0, 0, 0 }
+local getinnermethod = references.getinnermethod
+
local cache = { }
local function pagedestination(n) -- only cache fit
@@ -77,7 +82,7 @@ local function pagedestination(n) -- only cache fit
pdfreference(pdfpagereference(n)),
pdfconstant("Fit")
}
- pd = pdfreference(pdfflushobject(a))
+ pd = pdfshareobjectref(a)
cache[n] = pd
end
return pd
@@ -86,25 +91,6 @@ end
lpdf.pagedestination = pagedestination
---~ local cache = { }
-
---~ local function gotopagedestination(n) -- could be reference instead
---~ if n > 0 then
---~ local pd = cache[n]
---~ if not pd then
---~ local d = pdfdictionary { -- can be cached
---~ S = pdf_goto,
---~ D = pagedestination(p),
---~ }
---~ pd = pdfreference(pdfflushobject(d))
---~ cache[n] = pd
---~ end
---~ return pd
---~ end
---~ end
-
---~ lpdf.gotopagedestination = gotopagedestination
-
local defaultdestination = pdfarray { 0, pdfconstant("Fit") }
local function link(url,filename,destination,page,actions)
@@ -166,11 +152,20 @@ local function link(url,filename,destination,page,actions)
else
texcount.referencepagestate = 1
end
- --~ return gotopagedestination(p)
- return pdfdictionary { -- can be cached
- S = pdf_goto,
- D = pagedestination(p),
- }
+ if p > 0 then
+ --~ return gotopagedestination(p)
+ --~ return pdfdictionary { -- can be cached
+ --~ S = pdf_goto,
+ --~ D = pagedestination(p),
+ --~ }
+ return pdfdictionary { -- can be cached
+ S = pdf_goto,
+ D = pdfarray {
+ pdfreference(pdfpagereference(p)),
+ pdfconstant("Fit")
+ }
+ }
+ end
else
commands.writestatus("references","invalid page reference: %s",page or "?")
end
@@ -236,50 +231,87 @@ end
lpdf.action = pdfaction
-function codeinjections.prerollreference(actions)
+function codeinjections.prerollreference(actions) -- share can become option
local main = actions and pdfaction(actions)
if main then
main = pdfdictionary {
Subtype = pdf_link,
Border = pdf_border,
H = (not actions.highlight and pdf_n) or nil,
- A = main,
+ A = pdfshareobjectref(main),
F = 4, -- print (mandate in pdf/a)
- -- does not work at all in spite of specification
- -- OC = (actions.layer and lpdf.layerreferences[actions.layer]) or nil,
- -- OC = backends.pdf.layerreference(actions.layer),
}
- return main("A") -- todo: cache this, maybe weak
+ return main("A")
end
end
--- local cache = { } -- no real gain in thsi
---
--- function codeinjections.prerollreference(actions)
--- local main = actions and pdfaction(actions)
--- if main then
--- main = pdfdictionary {
--- Subtype = pdf_link,
--- Border = pdf_border,
--- H = (not actions.highlight and pdf_n) or nil,
--- A = main,
--- }
--- local cm = cache[main]
--- if not cm then
--- cm = "/A ".. tostring(pdfreference(pdfflushobject(main))
--- cache[main] = cm
--- end
--- return cm
--- end
--- end
-
-function nodeinjections.reference(width,height,depth,prerolled)
- if prerolled then
+local shareannotations experiments.register("backend.shareannotations",function() shareannotations = true end)
+
+if not shareannotations then
+
+ function nodeinjections.reference(width,height,depth,prerolled) -- keep this one
+ if prerolled then
+ if trace_references then
+ report_references("w=%s, h=%s, d=%s, a=%s",width,height,depth,prerolled)
+ end
+ return pdfannotation_node(width,height,depth,prerolled)
+ end
+ end
+
+else
+
+ local delayed = { }
+ local hashed = { }
+ local sharing = true -- we can do this for special refs (so we need an extra argument)
+
+ local function flush()
+ local n = 0
+ for k,v in next, delayed do
+ pdfimmediateobject(k,v)
+ n = n + 1
+ end
if trace_references then
- report_references("w=%s, h=%s, d=%s, a=%s",width,height,depth,prerolled)
+ report_references("%s annotations flushed",n)
+ end
+ delayed = { }
+ end
+
+ lpdf.registerpagefinalizer (flush,3,"annotations") -- somehow this lags behind .. I need to look into that some day
+ lpdf.registerdocumentfinalizer(flush,3,"annotations") -- so we need a final flush too
+
+ local factor = number.dimenfactors.bp
+
+ function codeinjections.finishreference(width,height,depth,prerolled)
+ local h, v = pdf.h, pdf.v
+ local llx, lly = h*factor, (v - depth)*factor
+ local urx, ury = (h + width)*factor, (v + height)*factor
+ local annot = format("<< /Type /Annot %s /Rect [%s %s %s %s] >>",prerolled,llx,lly,urx,ury)
+ local n = sharing and hashed[annot]
+ if not n then
+ n = pdfreserveobject() -- todo: share
+ delayed[n] = annot
+--~ n = pdf.obj(annot)
+--~ pdf.refobj(n)
+ if sharing then
+ hashed[annot] = n
+ end
+ end
+ pdfregisterannot(n)
+ end
+
+ _bpnf_ = codeinjections.finishreference
+
+ function nodeinjections.reference(width,height,depth,prerolled)
+ if prerolled then
+ if trace_references then
+ report_references("w=%s, h=%s, d=%s, a=%s",width,height,depth,prerolled)
+ end
+--~ local luacode = format("backends.pdf.codeinjections.finishreference(%s,%s,%s,'%s')",width,height,depth,prerolled)
+ local luacode = format("_bpnf_(%s,%s,%s,'%s')",width,height,depth,prerolled)
+ return latelua_node(luacode)
end
- return pdfannotation_node(width,height,depth,prerolled)
end
+
end
function nodeinjections.destination(width,height,depth,name,view)
@@ -291,15 +323,15 @@ end
-- runners and specials
-local method = "internal"
-
runners["inner"] = function(var,actions)
- if method == "internal" then
+ if getinnermethod() == "names" then
local vir = var.i.references
local internal = vir and vir.internal
if internal then
var.inner = "aut:"..internal
end
+ else
+ var.inner = nil
end
return link(nil,nil,var.inner,var.r,actions)
end
@@ -362,7 +394,7 @@ function specials.internal(var,actions) -- better resolve in strc-ref
local v = references.internals[i]
if not v then
-- error
- elseif method == "internal" then
+ elseif getinnermethod() == "names" then
-- named
return link(nil,nil,"aut:"..i,v.references.realpage,actions)
else
diff --git a/tex/context/base/lpdf-fld.lua b/tex/context/base/lpdf-fld.lua
index 01c791d69..4044c691b 100644
--- a/tex/context/base/lpdf-fld.lua
+++ b/tex/context/base/lpdf-fld.lua
@@ -21,32 +21,33 @@ local report_fields = logs.new("fields")
local backends, lpdf = backends, lpdf
-local variables = interfaces.variables
+local variables = interfaces.variables
-local references = structures.references
-local settings_to_array = utilities.parsers.settings_to_array
+local references = structures.references
+local settings_to_array = utilities.parsers.settings_to_array
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
-local registeredsymbol = codeinjections.registeredsymbol
+local registeredsymbol = codeinjections.registeredsymbol
-local pdfstream = lpdf.stream
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfreference = lpdf.reference
-local pdfunicode = lpdf.unicode
-local pdfstring = lpdf.string
-local pdfconstant = lpdf.constant
-local pdftoeight = lpdf.toeight
-local pdfflushobject = lpdf.flushobject
-local pdfsharedobject = lpdf.sharedobject
-local pdfreserveobject = lpdf.reserveobject
+local pdfstream = lpdf.stream
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfreference = lpdf.reference
+local pdfunicode = lpdf.unicode
+local pdfstring = lpdf.string
+local pdfconstant = lpdf.constant
+local pdftoeight = lpdf.toeight
+local pdfflushobject = lpdf.flushobject
+local pdfshareobjectref = lpdf.shareobjectreference
+local pdfreserveobject = lpdf.reserveobject
+local pdfreserveannotation = lpdf.reserveannotation
-local nodepool = nodes.pool
+local nodepool = nodes.pool
-local pdfannotation_node = nodepool.pdfannotation
+local pdfannotation_node = nodepool.pdfannotation
local submitoutputformat = 0 -- 0=unknown 1=HTML 2=FDF 3=XML => not yet used, needs to be checked
@@ -254,7 +255,7 @@ local function fieldappearances(specification)
local appearance = pdfdictionary { -- cache this one
N = registeredsymbol(n), R = registeredsymbol(r), D = registeredsymbol(d),
}
- return pdfsharedobject(tostring(appearance))
+ return pdfshareobjectref(appearance)
end
local function fieldstates(specification,forceyes,values,default)
@@ -316,7 +317,7 @@ local function fieldstates(specification,forceyes,values,default)
R = pdfdictionary { [forceyes or yesr] = registeredsymbol(yesr), Off = registeredsymbol(offr) },
D = pdfdictionary { [forceyes or yesd] = registeredsymbol(yesd), Off = registeredsymbol(offd) }
}
- local appearanceref = pdfsharedobject(tostring(appearance))
+ local appearanceref = pdfshareobjectref(appearance)
return appearanceref, default
end
@@ -645,7 +646,8 @@ local function save_parent(field,specification,d)
end
local function save_kid(field,specification,d)
- local kn = pdfreserveobject()
+--~ local kn = pdfreserveobject()
+ local kn = pdfreserveannotation()
field.kids[#field.kids+1] = pdfreference(kn)
node.write(pdfannotation_node(specification.width,specification.height,0,d(),kn))
end
diff --git a/tex/context/base/lpdf-pdx.lua b/tex/context/base/lpdf-fmt.lua
index 34461c43d..aaa134f77 100644
--- a/tex/context/base/lpdf-pdx.lua
+++ b/tex/context/base/lpdf-fmt.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['lpdf-pdx'] = {
+if not modules then modules = { } end modules ['lpdf-fmt'] = {
version = 1.001,
comment = "companion to lpdf-ini.mkiv",
author = "Peter Rolf and Hans Hagen",
@@ -8,8 +8,8 @@ if not modules then modules = { } end modules ['lpdf-pdx'] = {
-- context --directives="backend.format=PDF/X-1a:2001" --trackers=backend.format yourfile
-local trace_pdfx = false trackers.register("backend.pdfx", function(v) trace_pdfx = v end)
-local trace_format = false trackers.register("backend.format", function(v) trace_format = v end)
+local trace_format = false trackers.register("backend.format", function(v) trace_format = v end)
+local trace_variables = false trackers.register("backend.variables", function(v) trace_variables = v end)
local report_backends = logs.new("backends")
@@ -50,18 +50,17 @@ local prefixes = {
cmyk = "DefaultCMYK",
}
-local pdfxspecification, pdfxformat = nil, nil
+local formatspecification, formatname = nil, nil
-- * correspondent document wide flags (write once) needed for permission tests
-local pdfx = {
+local formats = utilities.storage.allocate {
["version"] = {
external_icc_profiles = 1.4, -- 'p' in name; URL reference of output intent
jbig2_compression = 1.4,
jpeg2000_compression = 1.5, -- not supported yet
nchannel_colorspace = 1.6, -- 'n' in name; n-channel colorspace support
open_prepress_interface = 1.3, -- 'g' in name; reference to external graphics
- opentype_fonts = 1.6,
optional_content = 1.5,
transparency = 1.4,
object_compression = 1.5,
@@ -69,6 +68,7 @@ local pdfx = {
["default"] = {
pdf_version = 1.7, -- todo: block tex primitive
format_name = "default",
+ xmp_file = "lpdf-pdx.xml",
gray_scale = true,
cmyk_colors = true,
rgb_colors = true,
@@ -80,7 +80,6 @@ local pdfx = {
external_icc_profiles = true, -- controls profile inclusion
include_intents = true,
open_prepress_interface = true, -- unknown
- opentype_fonts = true, -- out of our control
optional_content = true, -- todo: block at lua level
transparency = true, -- todo: block at lua level
jbig2_compression = true, -- todo: block at lua level
@@ -92,6 +91,7 @@ local pdfx = {
["pdf/x-1a:2001"] = {
pdf_version = 1.3,
format_name = "PDF/X-1a:2001",
+ xmp_file = "lpdf-pdx.xml",
gray_scale = true,
cmyk_colors = true,
spot_colors = true,
@@ -104,6 +104,7 @@ local pdfx = {
["pdf/x-1a:2003"] = {
pdf_version = 1.4,
format_name = "PDF/X-1a:2003",
+ xmp_file = "lpdf-pdx.xml",
gray_scale = true,
cmyk_colors = true,
spot_colors = true,
@@ -116,6 +117,7 @@ local pdfx = {
["pdf/x-3:2002"] = {
pdf_version = 1.3,
format_name = "PDF/X-3:2002",
+ xmp_file = "lpdf-pdx.xml",
gray_scale = true,
cmyk_colors = true,
rgb_colors = true,
@@ -131,6 +133,7 @@ local pdfx = {
["pdf/x-3:2003"] = {
pdf_version = 1.4,
format_name = "PDF/X-3:2003",
+ xmp_file = "lpdf-pdx.xml",
gray_scale = true,
cmyk_colors = true,
rgb_colors = true,
@@ -147,6 +150,7 @@ local pdfx = {
["pdf/x-4"] = {
pdf_version = 1.6,
format_name = "PDF/X-4",
+ xmp_file = "lpdf-pdx.xml",
gray_scale = true,
cmyk_colors = true,
rgb_colors = true,
@@ -155,7 +159,6 @@ local pdfx = {
cielab_colors = true,
internal_icc_profiles = true,
include_intents = true,
- opentype_fonts = true,
optional_content = true,
transparency = true,
jbig2_compression = true,
@@ -170,6 +173,7 @@ local pdfx = {
["pdf/x-4p"] = {
pdf_version = 1.6,
format_name = "PDF/X-4p",
+ xmp_file = "lpdf-pdx.xml",
gray_scale = true,
cmyk_colors = true,
rgb_colors = true,
@@ -179,7 +183,6 @@ local pdfx = {
internal_icc_profiles = true,
external_icc_profiles = true,
include_intents = true,
- opentype_fonts = true,
optional_content = true,
transparency = true,
jbig2_compression = true,
@@ -194,6 +197,7 @@ local pdfx = {
["pdf/x-5g"] = {
pdf_version = 1.6,
format_name = "PDF/X-5g",
+ xmp_file = "lpdf-pdx.xml",
gray_scale = true,
cmyk_colors = true,
rgb_colors = true,
@@ -203,7 +207,6 @@ local pdfx = {
internal_icc_profiles = true,
include_intents = true,
open_prepress_interface = true,
- opentype_fonts = true,
optional_content = true,
transparency = true,
jbig2_compression = true,
@@ -216,6 +219,7 @@ local pdfx = {
["pdf/x-5pg"] = {
pdf_version = 1.6,
format_name = "PDF/X-5pg",
+ xmp_file = "lpdf-pdx.xml",
gray_scale = true,
cmyk_colors = true,
rgb_colors = true,
@@ -226,7 +230,6 @@ local pdfx = {
external_icc_profiles = true,
include_intents = true,
open_prepress_interface = true,
- opentype_fonts = true,
optional_content = true,
transparency = true,
jbig2_compression = true,
@@ -239,6 +242,7 @@ local pdfx = {
["pdf/x-5n"] = {
pdf_version = 1.6,
format_name = "PDF/X-5n",
+ xmp_file = "lpdf-pdx.xml",
gray_scale = true,
cmyk_colors = true,
rgb_colors = true,
@@ -247,7 +251,6 @@ local pdfx = {
cielab_colors = true,
internal_icc_profiles = true,
include_intents = true,
- opentype_fonts = true,
optional_content = true,
transparency = true,
jbig2_compression = true,
@@ -257,10 +260,43 @@ local pdfx = {
inject_metadata = function()
-- todo
end
- }
+ },
+ ["pdf/a-1a:2005"] = {
+ pdf_version = 1.4,
+ format_name = "pdf/a-1a:2005",
+ xmp_file = "lpdf-pda.xml",
+ gray_scale = true,
+ cmyk_colors = true,
+ rgb_colors = true,
+ spot_colors = true,
+ calibrated_rgb_colors = true, -- unknown
+ cielab_colors = true, -- unknown
+ include_intents = true,
+ forms = true, -- NEW; forms are allowed (with limitations); no JS, other restrictions are unknown (TODO)
+ tagging = true, -- NEW; the only difference to PDF/A-1b
+ inject_metadata = function()
+ injectxmpinfo("xml://rdf:RDF","<rdf:Description rdf:about='' xmlns:pdfaid='http://www.aiim.org/pdfa/ns/id/'><pdfaid:part>1</pdfaid:part><pdfaid:conformance>A</pdfaid:conformance></rdf:Description>",false)
+ end
+ },
+ ["pdf/a-1b:2005"] = {
+ pdf_version = 1.4,
+ format_name = "pdf/a-1b:2005",
+ xmp_file = "lpdf-pda.xml",
+ gray_scale = true,
+ cmyk_colors = true,
+ rgb_colors = true,
+ spot_colors = true,
+ calibrated_rgb_colors = true, -- unknown
+ cielab_colors = true, -- unknown
+ include_intents = true,
+ forms = true,
+ inject_metadata = function()
+ injectxmpinfo("xml://rdf:RDF","<rdf:Description rdf:about='' xmlns:pdfaid='http://www.aiim.org/pdfa/ns/id/'><pdfaid:part>1</pdfaid:part><pdfaid:conformance>B</pdfaid:conformance></rdf:Description>",false)
+ end
+ },
}
-lpdf.pdfx = pdfx -- it does not hurt to have this one visible
+lpdf.formats = formats -- it does not hurt to have this one visible
local filenames = {
"colorprofiles.xml",
@@ -268,7 +304,7 @@ local filenames = {
}
local function locatefile(filename)
- local fullname = resolvers.find_file(filename,"icc")
+ local fullname = resolvers.findfile(filename,"icc")
if not fullname or fullname == "" then
fullname = resolvers.finders.loc(filename) -- could be specific to the project
end
@@ -308,7 +344,7 @@ local function loadprofile(name,filename)
if next(profile) then
report_backends("profile specification '%s' loaded from '%s'",name,filename)
return profile
- elseif trace_pdfx then
+ elseif trace_format then
report_backends("profile specification '%s' loaded from '%s' but empty",name,filename)
end
return false
@@ -368,13 +404,13 @@ local function handleinternalprofile(s,include)
attr = a(),
}
internalprofiles[tag] = profile
- if trace_pdfx then
+ if trace_format then
report_backends("including '%s' color profile from '%s'",colorspace,fullname)
end
end
else
internalprofiles[tag] = true
- if trace_pdfx then
+ if trace_format then
report_backends("not including '%s' color profile '%s'",colorspace,filename)
end
end
@@ -447,7 +483,7 @@ local function handledefaultprofile(s) -- specification
else
report_backends("no default profile '%s' for colorspace '%s'",filename,colorspace)
end
- elseif trace_pdfx then
+ elseif trace_format then
report_backends("a default '%s' colorspace is already in use",colorspace)
end
end
@@ -478,14 +514,14 @@ local function handleoutputintent(s)
report_backends("omitting reference to profile for intent '%s'",name)
end
intents[#intents+1] = pdfreference(pdfflushobject(pdfdictionary(d)))
- if trace_pdfx then
+ if trace_format then
report_backends("setting output intent to '%s' with id '%s' (entry %s)",name,id,#intents)
end
else
report_backends("invalid output intent '%s'",name)
end
loadedintents[name] = true
- elseif trace_pdfx then
+ elseif trace_format then
report_backends("an output intent with name '%s' is already in use",name)
end
end
@@ -496,48 +532,48 @@ local function handleiccprofile(message,name,filename,how,options,alwaysinclude)
for i=1,#list do
local name = list[i]
local profile = loadprofile(name,filename)
- if trace_pdfx then
+ if trace_format then
report_backends("handling %s '%s'",message,name)
end
if profile then
- if pdfxspecification.cmyk_colors then
+ if formatspecification.cmyk_colors then
profile.colorspace = profile.colorspace or "CMYK"
else
profile.colorspace = profile.colorspace or "RGB"
end
- local external = pdfxspecification.external_icc_profiles
- local internal = pdfxspecification.internal_icc_profiles
- local include = pdfxspecification.include_intents
+ local external = formatspecification.external_icc_profiles
+ local internal = formatspecification.internal_icc_profiles
+ local include = formatspecification.include_intents
local always, never = options[variables.always], options[variables.never]
if always or alwaysinclude then
- if trace_pdfx then
+ if trace_format then
report_backends("forcing internal profiles") -- can make preflight unhappy
end
-- internal, external = true, false
internal, external = not never, false
elseif never then
- if trace_pdfx then
+ if trace_format then
report_backends("forcing external profiles") -- can make preflight unhappy
end
internal, external = false, true
end
if external then
- if trace_pdfx then
+ if trace_format then
report_backends("handling external profiles cf. '%s'",name)
end
handleexternalprofile(profile,false)
else
- if trace_pdfx then
+ if trace_format then
report_backends("handling internal profiles cf. '%s'",name)
end
if internal then
handleinternalprofile(profile,always or include)
else
- report_backends("no profile inclusion for '%s'",pdfxformat)
+ report_backends("no profile inclusion for '%s'",formatname)
end
end
how(profile)
- elseif trace_pdfx then
+ elseif trace_format then
report_backends("unknown profile '%s'",name)
end
end
@@ -562,16 +598,22 @@ function codeinjections.setformat(s)
if format == "" then
-- we ignore this as we hook it in \everysetupbackend
else
- local spec = pdfx[lower(format)]
+ local spec = formats[lower(format)]
if spec then
- pdfxspecification, pdfxformat = spec, spec.format_name
+ formatspecification, formatname = spec, spec.format_name
level = level and tonumber(level)
- report_backends("setting format to '%s'",pdfxformat)
+ report_backends("setting format to '%s'",formatname)
+ local xmp_file = formatspecification.xmp_file or ""
+ if xmp_file == "" then
+ -- weird error
+ else
+ lpdf.setxmpfile(xmp_file)
+ end
local pdf_version, inject_metadata = spec.pdf_version * 10, spec.inject_metadata
local majorversion, minorversion = math.div(pdf_version,10), math.mod(pdf_version,10)
local objectcompression = spec.object_compression and pdf_version >= 15
local compresslevel = level or tex.pdfcompresslevel -- keep default
- local objectcompresslevel = (objectcompression and level or tex.pdfobjcompresslevel) or 0
+ local objectcompresslevel = (objectcompression and (level or tex.pdfobjcompresslevel)) or 0
tex.pdfcompresslevel, tex.pdfobjcompresslevel = compresslevel, objectcompresslevel
tex.pdfmajorversion, tex.pdfminorversion = majorversion, minorversion
if objectcompression then
@@ -616,16 +658,16 @@ function codeinjections.setformat(s)
local options = settings_to_hash(option)
handleiccprofile("color profile",profile,filename,handledefaultprofile,options,true)
handleiccprofile("output intent",intent,filename,handleoutputintent,options,false)
- if trace_format then
- for k, v in table.sortedhash(pdfx.default) do
- local v = pdfxspecification[k]
+ if trace_variables then
+ for k, v in table.sortedhash(formats.default) do
+ local v = formatspecification[k]
if type(v) ~= "function" then
report_backends("%s = %s",k,tostring(v or false))
end
end
end
function codeinjections.setformat(noname)
- report_backends("error, format is already set to '%s', ignoring '%s'",pdfxformat,noname.format)
+ report_backends("error, format is already set to '%s', ignoring '%s'",formatname,noname.format)
end
else
report_backends("error, format '%s' is not supported",format)
@@ -634,12 +676,12 @@ function codeinjections.setformat(s)
end
function codeinjections.getformatoption(key)
- return pdfxspecification and pdfxspecification[key]
+ return formatspecification and formatspecification[key]
end
function codeinjections.supportedformats()
local t = { }
- for k, v in table.sortedhash(pdfx) do
+ for k, v in table.sortedhash(formats) do
if find(k,"pdf") then
t[#t+1] = k
end
diff --git a/tex/context/base/lpdf-ini.lua b/tex/context/base/lpdf-ini.lua
index 07e1962cd..0a98f150b 100644
--- a/tex/context/base/lpdf-ini.lua
+++ b/tex/context/base/lpdf-ini.lua
@@ -342,16 +342,21 @@ lpdf.verbose = pdfverbose
local names, cache = { }, { }
function lpdf.reserveobject(name)
- local r = pdfreserveobject()
- if name then
- names[name] = r
- if trace_objects then
- report_backends("reserving object number %s under name '%s'",r,name)
+ if name == "annot" then
+ -- catch misuse
+ return pdfreserveobject("annot")
+ else
+ local r = pdfreserveobject()
+ if name then
+ names[name] = r
+ if trace_objects then
+ report_backends("reserving object number %s under name '%s'",r,name)
+ end
+ elseif trace_objects then
+ report_backends("reserving object number %s",r)
end
- elseif trace_objects then
- report_backends("reserving object number %s",r)
+ return r
end
- return r
end
function lpdf.reserveannotation()
@@ -409,15 +414,34 @@ function lpdf.flushobject(name,data)
end
end
-function lpdf.sharedobject(content)
- local r = cache[content]
+local shareobjectcache, shareobjectreferencecache = { }, { }
+
+function lpdf.shareobject(content)
+ content = tostring(content)
+ local o = shareobjectcache[content]
+ if not o then
+ o = pdfimmediateobject(content)
+ shareobjectcache[content] = o
+ end
+ return o
+end
+
+function lpdf.shareobjectreference(content)
+ content = tostring(content)
+ local r = shareobjectreferencecache[content]
if not r then
- r = pdfreference(pdfimmediateobject(content))
- cache[content] = r
+ local o = shareobjectcache[content]
+ if not o then
+ o = pdfimmediateobject(content)
+ shareobjectcache[content] = o
+ end
+ r = pdfreference(o)
+ shareobjectreferencecache[content] = r
end
return r
end
+
--~ local d = lpdf.dictionary()
--~ local e = lpdf.dictionary { ["e"] = "abc", x = lpdf.dictionary { ["f"] = "ABC" } }
--~ local f = lpdf.dictionary { ["f"] = "ABC" }
diff --git a/tex/context/base/lpdf-mis.lua b/tex/context/base/lpdf-mis.lua
index bdc9147a7..6bf277d4b 100644
--- a/tex/context/base/lpdf-mis.lua
+++ b/tex/context/base/lpdf-mis.lua
@@ -43,7 +43,6 @@ local pdfstring = lpdf.string
local pdfflushobject = lpdf.flushobject
local pdfimmediateobject = lpdf.immediateobject
-local tobasepoints = number.tobasepoints
local variables = interfaces.variables
--
diff --git a/tex/context/base/lpdf-pda.xml b/tex/context/base/lpdf-pda.xml
new file mode 100644
index 000000000..2d8e7b6f5
--- /dev/null
+++ b/tex/context/base/lpdf-pda.xml
@@ -0,0 +1,171 @@
+<?xml version="1.0"?>
+
+<!-- lpdf-pda.xml -->
+
+<x:xmpmeta xmlns:x="adobe:ns:meta/">
+ <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
+ <rdf:Description rdf:about="" xmlns:dc="http://purl.org/dc/elements/1.1/">
+ <dc:format>application/pdf</dc:format>
+ <dc:creator>
+ <rdf:Seq>
+ <rdf:li/>
+ </rdf:Seq>
+ </dc:creator>
+ <dc:description/>
+ <dc:title>
+ <rdf:Alt>
+ <rdf:li xml:lang="x-default"/>
+ </rdf:Alt>
+ </dc:title>
+ </rdf:Description>
+ <rdf:Description rdf:about="" xmlns:pdfx="http://ns.adobe.com/pdfx/1.3/">
+ <pdfx:ConTeXt.Jobname/>
+ <pdfx:ConTeXt.Time/>
+ <pdfx:ConTeXt.Url/>
+ <pdfx:ConTeXt.Version/>
+ <pdfx:ID/>
+ <pdfx:PTEX.Fullbanner/>
+ </rdf:Description>
+ <rdf:Description rdf:about="" xmlns:xmp="http://ns.adobe.com/xap/1.0/">
+ <xmp:CreateDate/>
+ <xmp:CreatorTool/>
+ <xmp:ModifyDate/>
+ <xmp:MetadataDate/>
+ </rdf:Description>
+ <rdf:Description rdf:about="" xmlns:pdf="http://ns.adobe.com/pdf/1.3/">
+ <pdf:Keywords/>
+ <pdf:Producer/>
+ <pdf:Trapped>False</pdf:Trapped>
+ </rdf:Description>
+ <rdf:Description rdf:about="" xmlns:xmpMM="http://ns.adobe.com/xap/1.0/mm/">
+ <xmpMM:DocumentID/>
+ <xmpMM:InstanceID/>
+ </rdf:Description>
+ <rdf:Description rdf:about=""
+ xmlns:pdfaExtension="http://www.aiim.org/pdfa/ns/extension/"
+ xmlns:pdfaSchema="http://www.aiim.org/pdfa/ns/schema#"
+ xmlns:pdfaProperty="http://www.aiim.org/pdfa/ns/property#">
+ <pdfaExtension:schemas>
+ <rdf:Bag>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaSchema:namespaceURI>http://ns.adobe.com/pdf/1.3/</pdfaSchema:namespaceURI>
+ <pdfaSchema:prefix>pdf</pdfaSchema:prefix>
+ <pdfaSchema:schema>Adobe PDF Schema</pdfaSchema:schema>
+ <pdfaSchema:property>
+ <rdf:Seq>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaProperty:category>internal</pdfaProperty:category>
+ <pdfaProperty:description>A name object indicating whether the document has been modified to include trapping information</pdfaProperty:description>
+ <pdfaProperty:name>Trapped</pdfaProperty:name>
+ <pdfaProperty:valueType>Text</pdfaProperty:valueType>
+ </rdf:li>
+ </rdf:Seq>
+ </pdfaSchema:property>
+ </rdf:li>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaSchema:namespaceURI>http://purl.org/dc/elements/1.1/</pdfaSchema:namespaceURI>
 +            <pdfaSchema:prefix>dc</pdfaSchema:prefix>
 +            <pdfaSchema:schema>Dublin Core Schema</pdfaSchema:schema>
+ <pdfaSchema:property>
+ <rdf:Seq>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaProperty:category>internal</pdfaProperty:category>
+ <pdfaProperty:description>Subject in Document Properties</pdfaProperty:description>
+ <pdfaProperty:name>description</pdfaProperty:name>
+ <pdfaProperty:valueType>Text</pdfaProperty:valueType>
+ </rdf:li>
+ </rdf:Seq>
+ </pdfaSchema:property>
+ </rdf:li>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaSchema:namespaceURI>http://ns.adobe.com/pdfx/1.3/</pdfaSchema:namespaceURI>
+ <pdfaSchema:prefix>pdfx</pdfaSchema:prefix>
+ <pdfaSchema:schema>PDF/X ID Schema</pdfaSchema:schema>
+ <pdfaSchema:property>
+ <rdf:Seq>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaProperty:category>external</pdfaProperty:category>
+ <pdfaProperty:description>Name of the ConTeXt job</pdfaProperty:description>
+ <pdfaProperty:name>ConTeXt.Jobname</pdfaProperty:name>
+ <pdfaProperty:valueType>Text</pdfaProperty:valueType>
+ </rdf:li>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaProperty:category>external</pdfaProperty:category>
+ <pdfaProperty:description>Time stamp of ConTeXt version</pdfaProperty:description>
+ <pdfaProperty:name>ConTeXt.Time</pdfaProperty:name>
+ <pdfaProperty:valueType>Text</pdfaProperty:valueType>
+ </rdf:li>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaProperty:category>external</pdfaProperty:category>
+ <pdfaProperty:description>ConTeXt website</pdfaProperty:description>
+ <pdfaProperty:name>ConTeXt.Url</pdfaProperty:name>
+ <pdfaProperty:valueType>Text</pdfaProperty:valueType>
+ </rdf:li>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaProperty:category>external</pdfaProperty:category>
+ <pdfaProperty:description>ConTeXt version</pdfaProperty:description>
+ <pdfaProperty:name>ConTeXt.Version</pdfaProperty:name>
+ <pdfaProperty:valueType>Text</pdfaProperty:valueType>
+ </rdf:li>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaProperty:category>external</pdfaProperty:category>
+ <pdfaProperty:description>Banner of pdftex or one of its successors</pdfaProperty:description>
+ <pdfaProperty:name>PTEX.Fullbanner</pdfaProperty:name>
+ <pdfaProperty:valueType>Text</pdfaProperty:valueType>
+ </rdf:li>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaProperty:category>external</pdfaProperty:category>
+ <pdfaProperty:description>Document identifier</pdfaProperty:description>
+ <pdfaProperty:name>ID</pdfaProperty:name>
+ <pdfaProperty:valueType>Text</pdfaProperty:valueType>
+ </rdf:li>
+ </rdf:Seq>
+ </pdfaSchema:property>
+ </rdf:li>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaSchema:namespaceURI>http://ns.adobe.com/xap/1.0/mm/</pdfaSchema:namespaceURI>
+ <pdfaSchema:prefix>xmpMM</pdfaSchema:prefix>
+ <pdfaSchema:schema>XMP Media Management Schema</pdfaSchema:schema>
+ <pdfaSchema:property>
+ <rdf:Seq>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaProperty:category>internal</pdfaProperty:category>
+ <pdfaProperty:description>UUID based identifier for specific incarnation of a document</pdfaProperty:description>
+ <pdfaProperty:name>InstanceID</pdfaProperty:name>
+ <pdfaProperty:valueType>URI</pdfaProperty:valueType>
+ </rdf:li>
+ </rdf:Seq>
+ </pdfaSchema:property>
+ </rdf:li>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaSchema:namespaceURI>http://www.aiim.org/pdfa/ns/id/</pdfaSchema:namespaceURI>
+ <pdfaSchema:prefix>pdfaid</pdfaSchema:prefix>
+ <pdfaSchema:schema>PDF/A ID Schema</pdfaSchema:schema>
+ <pdfaSchema:property>
+ <rdf:Seq>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaProperty:category>internal</pdfaProperty:category>
+ <pdfaProperty:description>Part of PDF/A standard</pdfaProperty:description>
+ <pdfaProperty:name>part</pdfaProperty:name>
+ <pdfaProperty:valueType>Integer</pdfaProperty:valueType>
+ </rdf:li>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaProperty:category>internal</pdfaProperty:category>
+ <pdfaProperty:description>Amendment of PDF/A standard</pdfaProperty:description>
+ <pdfaProperty:name>amd</pdfaProperty:name>
+ <pdfaProperty:valueType>Text</pdfaProperty:valueType>
+ </rdf:li>
+ <rdf:li rdf:parseType="Resource">
+ <pdfaProperty:category>internal</pdfaProperty:category>
+ <pdfaProperty:description>Conformance level of PDF/A standard</pdfaProperty:description>
+ <pdfaProperty:name>conformance</pdfaProperty:name>
+ <pdfaProperty:valueType>Text</pdfaProperty:valueType>
+ </rdf:li>
+ </rdf:Seq>
+ </pdfaSchema:property>
+ </rdf:li>
+ </rdf:Bag>
+ </pdfaExtension:schemas>
+ </rdf:Description>
+ </rdf:RDF>
+</x:xmpmeta>
diff --git a/tex/context/base/lpdf-xmp.xml b/tex/context/base/lpdf-pdx.xml
index 75657db69..42e11650e 100644
--- a/tex/context/base/lpdf-xmp.xml
+++ b/tex/context/base/lpdf-pdx.xml
@@ -1,5 +1,7 @@
<?xml version="1.0"?>
+<!-- lpdf-pdx.xml -->
+
<x:xmpmeta xmlns:x="adobe:ns:meta/">
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
<rdf:Description rdf:about="" xmlns:dc="http://purl.org/dc/elements/1.1/">
diff --git a/tex/context/base/lpdf-ren.lua b/tex/context/base/lpdf-ren.lua
index 8f9bc02e0..7c2d96467 100644
--- a/tex/context/base/lpdf-ren.lua
+++ b/tex/context/base/lpdf-ren.lua
@@ -15,17 +15,17 @@ local settings_to_array = utilities.parsers.settings_to_array
local backends, lpdf = backends, lpdf
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
-local viewerlayers = attributes.viewerlayers
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
+local viewerlayers = attributes.viewerlayers
-local references = structures.references
+local references = structures.references
-references.executers = references.executers or { }
-local executers = references.executers
+references.executers = references.executers or { }
+local executers = references.executers
-local variables = interfaces.variables
+local variables = interfaces.variables
local pdfconstant = lpdf.constant
local pdfdictionary = lpdf.dictionary
@@ -34,12 +34,12 @@ local pdfreference = lpdf.reference
local pdfflushobject = lpdf.flushobject
local pdfreserveobject = lpdf.reserveobject
-local pdf_ocg = pdfconstant("OCG")
-local pdf_ocmd = pdfconstant("OCMD")
-local pdf_off = pdfconstant("OFF")
-local pdf_on = pdfconstant("ON")
-local pdf_toggle = pdfconstant("Toggle")
-local pdf_setocgstate = pdfconstant("SetOCGState")
+local pdf_ocg = pdfconstant("OCG")
+local pdf_ocmd = pdfconstant("OCMD")
+local pdf_off = pdfconstant("OFF")
+local pdf_on = pdfconstant("ON")
+local pdf_toggle = pdfconstant("Toggle")
+local pdf_setocgstate = pdfconstant("SetOCGState")
local lpdf_usage = pdfdictionary { Print = pdfdictionary { PrintState = pdfconstant("OFF") } }
diff --git a/tex/context/base/lpdf-tag.lua b/tex/context/base/lpdf-tag.lua
index 4e297b91c..d05e3e22f 100644
--- a/tex/context/base/lpdf-tag.lua
+++ b/tex/context/base/lpdf-tag.lua
@@ -57,110 +57,20 @@ local root = { pref = pdfreference(structure_ref), kids = structure_k
local tree = { }
local elements = { }
local names = pdfarray()
-local taglist = { } -- set later
+local taglist = structures.tags.taglist
+local usedlabels = structures.tags.labels
+local properties = structures.tags.properties
+local usedmapping = { }
local colonsplitter = lpeg.splitat(":")
local dashsplitter = lpeg.splitat("-")
local add_ids = false -- true
-local mapping = {
- document = "Div",
-
- division = "Div",
- paragraph = "P",
- construct = "Span",
-
- structure = "Sect",
- structuretitle = "H",
- structurenumber = "H",
- structurecontent = "Div",
-
- itemgroup = "L",
- item = "Li",
- itemtag = "Lbl",
- itemcontent = "LBody",
-
- description = "Li",
- descriptiontag = "Lbl",
- descriptioncontent = "LBody",
-
- verbatimblock = "Code",
- verbatim = "Code",
-
- register = "Div",
- registersection = "Div",
- registertag = "Span",
- registerentries = "Div",
- registerentry = "Span",
- registersee = "Span",
- registerpages = "Span",
- registerpage = "Span",
-
- table = "Table",
- tablerow = "TR",
- tablecell = "TD",
- tabulate = "Table",
- tabulaterow = "TR",
- tabulatecell = "TD",
-
- list = "TOC",
- listitem = "TOCI",
- listtag = "Lbl",
- listcontent = "P",
- listdata = "P",
- listpage = "Reference",
-
- delimitedblock = "BlockQuote",
- delimited = "Quote",
- subsentence = "Span",
-
- float = "Div",
- floatcaption = "Caption",
- floattag = "Span",
- floattext = "Span",
- floatcontent = "P",
-
- image = "P",
- mpgraphic = "P",
-
- formulaset = "Div",
- formula = "Div",
- formulatag = "Span",
- formulacontent = "P",
- subformula = "Div",
-
- link = "Link",
-
- math = "Div",
- mn = "Span",
- mi = "Span",
- mo = "Span",
- ms = "Span",
- mrow = "Span",
- msubsup = "Span",
- msub = "Span",
- msup = "Span",
- merror = "Span",
- munderover = "Span",
- munder = "Span",
- mover = "Span",
- mtext = "Span",
- mfrac = "Span",
- mroot = "Span",
- msqrt = "Span",
-}
-
-local usedmapping = { }
-local usedlabels = { }
-function codeinjections.mapping()
- return mapping -- future versions may provide a copy
-end
-
-function codeinjections.maptag(original,target)
- mapping[original] = target
-end
+--~ function codeinjections.maptag(original,target,kind)
+--~ mapping[original] = { target, kind or "inline" }
+--~ end
local function finishstructure()
if #structure_kids > 0 then
@@ -186,7 +96,8 @@ local function finishstructure()
local rolemap = pdfdictionary()
for k, v in next, usedmapping do
k = usedlabels[k] or k
- rolemap[k] = pdfconstant(mapping[k] or "Span") -- or "Div"
+ local p = properties[k]
+ rolemap[k] = pdfconstant(p and p.pdf or "Span") -- or "Div"
end
local structuretree = pdfdictionary {
Type = pdfconstant("StructTreeRoot"),
@@ -240,15 +151,15 @@ local function makeelement(fulltag,parent)
usedmapping[tg] = true
tg = usedlabels[tg] or tg
local d = pdfdictionary {
- Type = pdf_struct_element,
- S = pdfconstant(tg),
- ID = (add_ids and fulltag) or nil,
- T = detail and detail or nil,
- P = parent.pref,
- Pg = pageref,
- K = pdfreference(r),
---~ Alt = " Who cares ",
---~ ActualText = " Hi Hans ",
+ Type = pdf_struct_element,
+ S = pdfconstant(tg),
+ ID = (add_ids and fulltag) or nil,
+ T = detail and detail or nil,
+ P = parent.pref,
+ Pg = pageref,
+ K = pdfreference(r),
+ -- Alt = " Who cares ",
+ -- ActualText = " Hi Hans ",
}
local s = pdfreference(pdfflushobject(d))
if add_ids then
@@ -309,7 +220,7 @@ end
-- -- --
-local level, last, ranges, range = 0, nil, { }, { }
+local level, last, ranges, range = 0, nil, { }, nil
local function collectranges(head,list)
for n in traverse_nodes(head) do
@@ -336,8 +247,9 @@ local function collectranges(head,list)
end
last = nil
else
- slide_nodelist(n.list) -- temporary hack till math gets slided (tracker item)
- collectranges(n.list,n)
+ local nl = n.list
+ slide_nodelist(nl) -- temporary hack till math gets slided (tracker item)
+ collectranges(nl,n)
end
end
end
@@ -345,7 +257,7 @@ end
function nodeinjections.addtags(head)
-- no need to adapt head, as we always operate on lists
- level, last, ranges, range = 0, nil, { }, { }
+ level, last, ranges, range = 0, nil, { }, nil
initializepage()
collectranges(head)
if trace_tags then
@@ -353,7 +265,7 @@ function nodeinjections.addtags(head)
local range = ranges[i]
local attr, id, start, stop = range[1], range[2], range[3], range[4]
local tags = taglist[attr]
- if tags then
+ if tags then -- not ok ... only first lines
report_tags("%s => %s : %05i %s",tosequence(start,start),tosequence(stop,stop),attr,concat(tags," "))
end
end
@@ -386,7 +298,6 @@ function nodeinjections.addtags(head)
end
function codeinjections.enabletags(tg,lb)
- taglist, usedlabels = tg, lb
structures.tags.handler = nodeinjections.addtags
tasks.enableaction("shipouts","structures.tags.handler")
tasks.enableaction("shipouts","nodes.handlers.accessibility")
diff --git a/tex/context/base/lpdf-wid.lua b/tex/context/base/lpdf-wid.lua
index ccb1ee7f2..651790630 100644
--- a/tex/context/base/lpdf-wid.lua
+++ b/tex/context/base/lpdf-wid.lua
@@ -27,8 +27,7 @@ local pdfunicode = lpdf.unicode
local pdfstring = lpdf.string
local pdfcolorspec = lpdf.colorspec
local pdfflushobject = lpdf.flushobject
-local pdfreserveobject = lpdf.reserveobject
-local pdfreserveannotation = lpdf.reserveobject
+local pdfreserveannotation = lpdf.reserveannotation
local pdfimmediateobject = lpdf.immediateobject
local pdfpagereference = lpdf.pagereference
@@ -131,10 +130,14 @@ function codeinjections.registercomment(specification)
Name = name,
AP = appearance,
}
+ --
-- watch the nice feed back to tex hack
+ --
+ -- we can consider replacing nodes by user nodes that do a latelua
+ -- so that we get rid of all annotation whatsits
if usepopupcomments then
- local nd = pdfreserveobject()
- local nc = pdfreserveobject()
+ local nd = pdfreserveannotation()
+ local nc = pdfreserveannotation()
local c = pdfdictionary {
Subtype = pdfconstant("Popup"),
Parent = pdfreference(nd),
diff --git a/tex/context/base/lpdf-xmp.lua b/tex/context/base/lpdf-xmp.lua
index 294bf99e7..490eacbda 100644
--- a/tex/context/base/lpdf-xmp.lua
+++ b/tex/context/base/lpdf-xmp.lua
@@ -12,6 +12,8 @@ local xmlfillin = xml.fillin
local trace_xmp = false trackers.register("backend.xmp", function(v) trace_xmp = v end)
+local report_xmp = logs.new("backends")
+
local lpdf = lpdf
local pdfdictionary = lpdf.dictionary
@@ -73,18 +75,32 @@ local mapping = {
-- maybe some day we will load the xmp file at runtime
-local xmp, xmpfile, xmpname = nil, nil, "lpdf-xmp.xml"
+local xmp, xmpfile, xmpname = nil, nil, "lpdf-pdx.xml"
function lpdf.setxmpfile(name)
- xmpfile = resolvers.findctxfile(name) or ""
- if xmpfile == "" then
- xmpfile = nil
+ -- xmpfile = resolvers.findctxfile(name) or ""
+ -- if xmpfile == "" then
+ -- xmpfile = nil
+ -- end
+ if xmp then
+ report_xmp("discarding loaded xmp file '%s'",xmpfile)
+ xmp = nil
end
+ xmpfile = name ~= "" and name
end
local function valid_xmp()
if not xmp then
- local xmpfile = xmpfile or resolvers.find_file(xmpname) or ""
+ -- local xmpfile = xmpfile or resolvers.findfile(xmpname) or ""
+ if xmpfile and xmpfile ~= "" then
+ xmpfile = resolvers.findfile(xmpfile) or ""
+ end
+ if not xmpfile or xmpfile == "" then
+ xmpfile = resolvers.findfile(xmpname) or ""
+ end
+ if xmpfile ~= "" then
+ report_xmp("using xmp file '%s'",xmpfile)
+ end
local xmpdata = (xmpfile ~= "" and io.loaddata(xmpfile)) or ""
xmp = xml.convert(xmpdata)
end
diff --git a/tex/context/base/luat-cbk.lua b/tex/context/base/luat-cbk.lua
index 454c89425..a21976372 100644
--- a/tex/context/base/luat-cbk.lua
+++ b/tex/context/base/luat-cbk.lua
@@ -38,7 +38,7 @@ local frozen, stack, list = { }, { }, callbacks.list
if not callbacks.list then -- otherwise counters get reset
- list = list_callbacks()
+ list = utilities.storage.allocate(list_callbacks())
for k, _ in next, list do
list[k] = 0
diff --git a/tex/context/base/luat-cnf.lua b/tex/context/base/luat-cnf.lua
index 4e723afee..8b52a269c 100644
--- a/tex/context/base/luat-cnf.lua
+++ b/tex/context/base/luat-cnf.lua
@@ -9,13 +9,15 @@ if not modules then modules = { } end modules ['luat-cnf'] = {
local type, next, tostring, tonumber = type, next, tostring, tonumber
local format, concat, find = string.format, table.concat, string.find
+local allocate = utilities.storage.allocate
+
texconfig.kpse_init = false
texconfig.shell_escape = 't'
luatex = luatex or { }
local luatex = luatex
-local variablenames = { -- most of this becomes obsolete
+local variablenames = allocate { -- most of this becomes obsolete
'buf_size', -- 3000
'dvi_buf_size', -- 16384
'error_line', -- 79
@@ -37,10 +39,10 @@ local variablenames = { -- most of this becomes obsolete
}
local function initialize()
- local t, var_value = { }, resolvers.var_value
+ local t, variable = allocate(), resolvers.variable
for i=1,#variablenames do
local name = variablenames[i]
- local value = var_value(name)
+ local value = variable(name)
value = tonumber(value) or value
texconfig[name], t[name] = value, value
end
@@ -89,6 +91,7 @@ function texconfig.init()
},
obsolete = {
"fontforge", -- can be filled by luat-log
+ "kpse",
},
builtin = builtin, -- to be filled
globals = globals, -- to be filled
diff --git a/tex/context/base/luat-cod.lua b/tex/context/base/luat-cod.lua
index 60a0b616f..a06c8c0bf 100644
--- a/tex/context/base/luat-cod.lua
+++ b/tex/context/base/luat-cod.lua
@@ -56,6 +56,9 @@ local finalizers = { }
function lua.registerfinalizer(f,comment)
if type(f) == "function" then
finalizers[#finalizers+1] = { action = f, comment = comment }
+ else
 +        print(string.format("fatal error: invalid finalizer, action: %s",comment or "unknown"))
+ os.exit()
end
end
diff --git a/tex/context/base/luat-dum.lua b/tex/context/base/luat-dum.lua
index a8eed5d88..be886692a 100644
--- a/tex/context/base/luat-dum.lua
+++ b/tex/context/base/luat-dum.lua
@@ -40,6 +40,10 @@ logs = {
callbacks = {
register = function(n,f) return callback.register(n,f) end,
}
+utilities = {
+ allocate = function(t) return t end,
+ mark = function(t) return t end,
+}
-- we need to cheat a bit here
@@ -56,7 +60,7 @@ local remapper = {
fea = "font feature files",
}
-function resolvers.find_file(name,kind)
+function resolvers.findfile(name,kind)
name = string.gsub(name,"\\","\/")
kind = string.lower(kind)
return kpse.find_file(name,(kind and kind ~= "" and (remapper[kind] or kind)) or file.extname(name,"tex"))
@@ -66,7 +70,7 @@ function resolvers.findbinfile(name,kind)
if not kind or kind == "" then
kind = file.extname(name) -- string.match(name,"%.([^%.]-)$")
end
- return resolvers.find_file(name,(kind and remapper[kind]) or kind)
+ return resolvers.findfile(name,(kind and remapper[kind]) or kind)
end
-- Caches ... I will make a real stupid version some day when I'm in the
diff --git a/tex/context/base/luat-env.lua b/tex/context/base/luat-env.lua
index 5fa9550f7..ed0884992 100644
--- a/tex/context/base/luat-env.lua
+++ b/tex/context/base/luat-env.lua
@@ -16,6 +16,8 @@ local trace_locating = false trackers.register("resolvers.locating", function(v
local report_resolvers = logs.new("resolvers")
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+
local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
local unquote, quote = string.unquote, string.quote
local concat = table.concat
@@ -45,8 +47,8 @@ end
environment = environment or { }
local environment = environment
-environment.arguments = { }
-environment.files = { }
+environment.arguments = allocate()
+environment.files = allocate()
environment.sortedflags = nil
local mt = {
@@ -114,7 +116,7 @@ function environment.argument(name,partial)
return arguments[name]
elseif partial then
if not sortedflags then
- sortedflags = table.sortedkeys(arguments)
+ sortedflags = allocate(table.sortedkeys(arguments))
for k=1,#sortedflags do
sortedflags[k] = "^" .. sortedflags[k]
end
@@ -200,8 +202,8 @@ if arg then
environment.initializearguments(newarg)
- environment.originalarguments = newarg
- environment.rawarguments = arg
+ environment.originalarguments = mark(newarg)
+ environment.rawarguments = mark(arg)
arg = { } -- prevent duplicate handling
@@ -210,19 +212,19 @@ end
-- weird place ... depends on a not yet loaded module
function environment.texfile(filename)
- return resolvers.find_file(filename,'tex')
+ return resolvers.findfile(filename,'tex')
end
function environment.luafile(filename)
- local resolved = resolvers.find_file(filename,'tex') or ""
+ local resolved = resolvers.findfile(filename,'tex') or ""
if resolved ~= "" then
return resolved
end
- resolved = resolvers.find_file(filename,'texmfscripts') or ""
+ resolved = resolvers.findfile(filename,'texmfscripts') or ""
if resolved ~= "" then
return resolved
end
- return resolvers.find_file(filename,'luatexlibs') or ""
+ return resolvers.findfile(filename,'luatexlibs') or ""
end
environment.loadedluacode = loadfile -- can be overloaded
diff --git a/tex/context/base/luat-fmt.lua b/tex/context/base/luat-fmt.lua
index 13f751215..0b7221873 100644
--- a/tex/context/base/luat-fmt.lua
+++ b/tex/context/base/luat-fmt.lua
@@ -33,7 +33,7 @@ function environment.make_format(name)
logs.simple("format path: %s",lfs.currentdir())
-- check source file
local texsourcename = file.addsuffix(name,"tex")
- local fulltexsourcename = resolvers.find_file(texsourcename,"tex") or ""
+ local fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
if fulltexsourcename == "" then
logs.simple("no tex source file with name: %s",texsourcename)
lfs.chdir(olddir)
@@ -44,10 +44,10 @@ function environment.make_format(name)
local texsourcepath = dir.expandname(file.dirname(fulltexsourcename)) -- really needed
-- check specification
local specificationname = file.replacesuffix(fulltexsourcename,"lus")
- local fullspecificationname = resolvers.find_file(specificationname,"tex") or ""
+ local fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
if fullspecificationname == "" then
specificationname = file.join(texsourcepath,"context.lus")
- fullspecificationname = resolvers.find_file(specificationname,"tex") or ""
+ fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
end
if fullspecificationname == "" then
logs.simple("unknown stub specification: %s",specificationname)
@@ -69,7 +69,7 @@ function environment.make_format(name)
logs.simple("creating initialization file: %s",luastubname)
utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
-- compile stub file (does not save that much as we don't use this stub at startup any more)
- local strip = resolvers.boolean_variable("LUACSTRIP", true)
+ local strip = resolvers.booleanvariable("LUACSTRIP", true)
if utilities.lua.compile(luastubname,lucstubname,false,strip) and lfs.isfile(lucstubname) then
logs.simple("using compiled initialization file: %s",lucstubname)
usedluastub = lucstubname
@@ -106,9 +106,9 @@ function environment.run_format(name,data,more)
local barename = file.removesuffix(name)
local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
if fmtname == "" then
- fmtname = resolvers.find_file(file.addsuffix(barename,"fmt")) or ""
+ fmtname = resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
end
- fmtname = resolvers.clean_path(fmtname)
+ fmtname = resolvers.cleanpath(fmtname)
if fmtname == "" then
logs.simple("no format with name: %s",name)
else
diff --git a/tex/context/base/luat-ini.lua b/tex/context/base/luat-ini.lua
index 2f12503ad..b57962c2d 100644
--- a/tex/context/base/luat-ini.lua
+++ b/tex/context/base/luat-ini.lua
@@ -13,6 +13,8 @@ local string, table, lpeg, math, io, system = string, table, lpeg, math, io, sys
local next, setfenv = next, setfenv or debug.setfenv
local format = string.format
+local mark = utilities.storage.mark
+
--[[ldx--
<p>We cannot load anything yet. However what we will do us reserve a fewtables.
These can be used for runtime user data or third party modules and will not be
@@ -31,8 +33,8 @@ document = document or { }
<p>These can be used/set by the caller program; <t>mtx-context.lua</t> does it.</p>
--ldx]]--
-document.arguments = document.arguments or { }
-document.files = document.files or { }
+document.arguments = mark(document.arguments or { })
+document.files = mark(document.files or { })
--[[ldx--
<p>Please create a namespace within these tables before using them!</p>
diff --git a/tex/context/base/luat-iop.lua b/tex/context/base/luat-iop.lua
index e6f8a7433..6642a2383 100644
--- a/tex/context/base/luat-iop.lua
+++ b/tex/context/base/luat-iop.lua
@@ -13,10 +13,12 @@ if not modules then modules = { } end modules ['luat-iop'] = {
local lower, find, sub = string.lower, string.find, string.sub
+local allocate = utilities.storage.allocate
+
local ioinp = io.inp if not ioinp then ioinp = { } io.inp = ioinp end
local ioout = io.out if not ioout then ioout = { } io.out = ioout end
-ioinp.modes, ioout.modes = { }, { } -- functions
+ioinp.modes, ioout.modes = allocate(), allocate()
local inp_blocked, inp_permitted = { }, { }
local out_blocked, out_permitted = { }, { }
@@ -104,12 +106,12 @@ function ioinp.modes.paranoid()
i_inhibit('%.%.')
i_permit('^%./')
i_permit('[^/]')
- resolvers.do_with_path('TEXMF',i_permit)
+ resolvers.dowithpath('TEXMF',i_permit)
end
function ioout.modes.paranoid()
o_inhibit('.*')
- resolvers.do_with_path('TEXMFOUTPUT',o_permit)
+ resolvers.dowithpath('TEXMFOUTPUT',o_permit)
end
-- handy
diff --git a/tex/context/base/luat-lib.mkiv b/tex/context/base/luat-lib.mkiv
index 366578efb..02bafa4cc 100644
--- a/tex/context/base/luat-lib.mkiv
+++ b/tex/context/base/luat-lib.mkiv
@@ -21,6 +21,7 @@
\registerctxluafile{util-tab}{1.001}
\registerctxluafile{util-fmt}{1.001}
\registerctxluafile{util-deb}{1.001} % could also be done in trac-deb.mkiv
+\registerctxluafile{util-sto}{1.001} % could also be done in trac-deb.mkiv
\registerctxluafile{trac-inf}{1.001}
\registerctxluafile{trac-set}{1.001}
diff --git a/tex/context/base/luat-sto.lua b/tex/context/base/luat-sto.lua
index 0c810021d..2ead0253d 100644
--- a/tex/context/base/luat-sto.lua
+++ b/tex/context/base/luat-sto.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['luat-sto'] = {
license = "see context related readme files"
}
-local type, next = type, next
+local type, next, setmetatable, getmetatable = type, next, setmetatable, getmetatable
local gmatch, format, write_nl = string.gmatch, string.format, texio.write_nl
local report_storage = logs.new("storage")
@@ -25,8 +25,21 @@ storage.max = storage.min - 1
storage.noftables = storage.noftables or 0
storage.nofmodules = storage.nofmodules or 0
+storage.mark = utilities.storage.mark
+storage.allocate = utilities.storage.allocate
+storage.marked = utilities.storage.marked
+
function storage.register(...)
- data[#data+1] = { ... }
+ local t = { ... }
+ local d = t[2]
+ if d then
+ storage.mark(d)
+ else
+ report_storage("fatal error: invalid storage '%s'",t[1])
+ os.exit()
+ end
+ data[#data+1] = t
+ return t
end
-- evaluators .. messy .. to be redone
@@ -140,3 +153,11 @@ storage.shared = storage.shared or { }
-- (non table) values.
storage.register("storage/shared", storage.shared, "storage.shared")
+
+local mark = storage.mark
+
+if string.patterns then mark(string.patterns) end
+if lpeg.patterns then mark(lpeg.patterns) end
+if os.env then mark(os.env) end
+if number.dimenfactors then mark(number.dimenfactors) end
+if libraries then for k,v in next, libraries do mark(v) end end
diff --git a/tex/context/base/lxml-aux.lua b/tex/context/base/lxml-aux.lua
index f2e3ee61e..f01478dbe 100644
--- a/tex/context/base/lxml-aux.lua
+++ b/tex/context/base/lxml-aux.lua
@@ -79,7 +79,7 @@ function xml.withelement(e,n,handle) -- slow
end
function xml.each(root,pattern,handle,reverse)
- local collected = xmlapplylpath({ root },pattern)
+ local collected = xmlapplylpath(root,pattern)
if collected then
if reverse then
for c=#collected,1,-1 do
@@ -95,7 +95,7 @@ function xml.each(root,pattern,handle,reverse)
end
function xml.processattributes(root,pattern,handle)
- local collected = xmlapplylpath({ root },pattern)
+ local collected = xmlapplylpath(root,pattern)
if collected and handle then
for c=1,#collected do
handle(collected[c].at)
@@ -111,11 +111,11 @@ end
-- are these still needed -> lxml-cmp.lua
function xml.collect(root, pattern)
- return xmlapplylpath({ root },pattern)
+ return xmlapplylpath(root,pattern)
end
function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle
- local collected = xmlapplylpath({ root },pattern)
+ local collected = xmlapplylpath(root,pattern)
if collected and flatten then
local xmltostring = xml.tostring
for c=1,#collected do
@@ -126,7 +126,7 @@ function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle
end
function xml.collect_tags(root, pattern, nonamespace)
- local collected = xmlapplylpath({ root },pattern)
+ local collected = xmlapplylpath(root,pattern)
if collected then
local t = { }
for c=1,#collected do
@@ -197,7 +197,7 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath({ root },pattern)
+ local collected = xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -216,7 +216,7 @@ end
function xml.replace(root,pattern,whatever)
local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath({ root },pattern)
+ local collected = element and xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -235,7 +235,7 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath({ root },pattern)
+ local collected = element and xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -266,7 +266,7 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath({ root },pattern)
+ local collected = element and xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -292,7 +292,7 @@ local function include(xmldata,pattern,attribute,recursive,loaddata)
-- attribute = attribute or 'href'
pattern = pattern or 'include'
loaddata = loaddata or io.loaddata
- local collected = xmlapplylpath({ xmldata },pattern)
+ local collected = xmlapplylpath(xmldata,pattern)
if collected then
for c=1,#collected do
local ek = collected[c]
@@ -335,74 +335,80 @@ end
xml.include = include
-function xml.strip(root, pattern, nolines, anywhere) -- strips all leading and trailing spacing
- local collected = xmlapplylpath({ root },pattern) -- beware, indices no longer are valid now
- if collected then
- for i=1,#collected do
- local e = collected[i]
- local edt = e.dt
- if edt then
- if anywhere then
- local t = { }
- for e=1,#edt do
- local str = edt[e]
- if type(str) ~= "string" then
- t[#t+1] = str
- elseif str ~= "" then
- -- todo: lpeg for each case
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s*(.-)%s*$","%1")
- if str ~= "" then
- t[#t+1] = str
- end
- end
+local function stripelement(e,nolines,anywhere)
+ local edt = e.dt
+ if edt then
+ if anywhere then
+ local t = { }
+ for e=1,#edt do
+ local str = edt[e]
+ if type(str) ~= "string" then
+ t[#t+1] = str
+ elseif str ~= "" then
+ -- todo: lpeg for each case
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s*(.-)%s*$","%1")
+ if str ~= "" then
+ t[#t+1] = str
end
- e.dt = t
+ end
+ end
+ e.dt = t
+ else
+ -- we can assume a regular sparse xml table with no successive strings
+ -- otherwise we should use a while loop
+ if #edt > 0 then
+ -- strip front
+ local str = edt[1]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt,1)
else
- -- we can assume a regular sparse xml table with no successive strings
- -- otherwise we should use a while loop
- if #edt > 0 then
- -- strip front
- local str = edt[1]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt,1)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s+","")
- if str == "" then
- remove(edt,1)
- else
- edt[1] = str
- end
- end
+ if nolines then
+ str = gsub(str,"%s+"," ")
end
- if #edt > 1 then
- -- strip end
- local str = edt[#edt]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"%s+$","")
- if str == "" then
- remove(edt)
- else
- edt[#edt] = str
- end
- end
+ str = gsub(str,"^%s+","")
+ if str == "" then
+ remove(edt,1)
+ else
+ edt[1] = str
end
end
end
+ if #edt > 1 then
+ -- strip end
+ local str = edt[#edt]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"%s+$","")
+ if str == "" then
+ remove(edt)
+ else
+ edt[#edt] = str
+ end
+ end
+ end
+ end
+ end
+ return e -- convenient
+end
+
+xml.stripelement = stripelement
+
+function xml.strip(root,pattern,nolines,anywhere) -- strips all leading and trailing spacing
+ local collected = xmlapplylpath(root,pattern) -- beware, indices no longer are valid now
+ if collected then
+ for i=1,#collected do
+ stripelement(collected[i],nolines,anywhere)
end
end
end
@@ -429,7 +435,7 @@ end
xml.renamespace = renamespace
function xml.remaptag(root, pattern, newtg)
- local collected = xmlapplylpath({ root },pattern)
+ local collected = xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
collected[c].tg = newtg
@@ -438,7 +444,7 @@ function xml.remaptag(root, pattern, newtg)
end
function xml.remapnamespace(root, pattern, newns)
- local collected = xmlapplylpath({ root },pattern)
+ local collected = xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
collected[c].ns = newns
@@ -447,7 +453,7 @@ function xml.remapnamespace(root, pattern, newns)
end
function xml.checknamespace(root, pattern, newns)
- local collected = xmlapplylpath({ root },pattern)
+ local collected = xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -459,7 +465,7 @@ function xml.checknamespace(root, pattern, newns)
end
function xml.remapname(root, pattern, newtg, newns, newrn)
- local collected = xmlapplylpath({ root },pattern)
+ local collected = xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
diff --git a/tex/context/base/lxml-ini.mkiv b/tex/context/base/lxml-ini.mkiv
index 8bc154df4..83a9825bf 100644
--- a/tex/context/base/lxml-ini.mkiv
+++ b/tex/context/base/lxml-ini.mkiv
@@ -314,24 +314,6 @@
\c!compress=\v!no, % strip comment
\c!entities=\v!yes] % replace entities
-% \defineXMLenvironment[y]{(}{)}
-%
-% \startxmlsetups x
-% /\xmlflush{#1}/
-% \stopxmlsetups
-%
-% \startxmlsetups xx
-% \xmlsetsetup{main}{x}{*}
-% \stopxmlsetups
-%
-% \xmlregistersetup{xx}
-%
-% \startbuffer
-% <x>a &lt;&amp;&gt;<y>{b}</y> c</x>
-% \stopbuffer
-%
-% mkii: [\processXMLbuffer]\quad mkiv: [\xmlprocessbuffer{main}{}{}]
-
\def\xmlmapvalue #1#2#3{\setvalue{\??xm:v:#1:#2}{#3}} % keep #3 to grab spaces
\def\xmlvalue #1#2#3{\executeifdefined{\??xm:v:#1:#2}{#3}}
%def\xmlvalue #1#2{\ifcsname\??xm:v:#1:#2\endcsname\csname\??xm:v:#1:#2\expandafter\expandafter\gobbleoneargument\expandafter\endcsname\else\expandafter\firstofoneargument\fi}
diff --git a/tex/context/base/lxml-lpt.lua b/tex/context/base/lxml-lpt.lua
index 954e72194..ef9a1175b 100644
--- a/tex/context/base/lxml-lpt.lua
+++ b/tex/context/base/lxml-lpt.lua
@@ -998,8 +998,41 @@ local function normal_apply(list,parsed,nofparsed,order)
return collected
end
+--~ local function applylpath(list,pattern)
+--~ -- we avoid an extra call
+--~ local parsed = cache[pattern]
+--~ if parsed then
+--~ lpathcalls = lpathcalls + 1
+--~ lpathcached = lpathcached + 1
+--~ elseif type(pattern) == "table" then
+--~ lpathcalls = lpathcalls + 1
+--~ parsed = pattern
+--~ else
+--~ parsed = lpath(pattern) or pattern
+--~ end
+--~ if not parsed then
+--~ return
+--~ end
+--~ local nofparsed = #parsed
+--~ if nofparsed == 0 then
+--~ return -- something is wrong
+--~ end
+--~ local one = list[1] -- we could have a third argument: isroot and list or list[1] or whatever we like ... todo
+--~ if not one then
+--~ return -- something is wrong
+--~ elseif not trace_lpath then
+--~ return normal_apply(list,parsed,nofparsed,one.mi)
+--~ elseif trace_lprofile then
+--~ return profiled_apply(list,parsed,nofparsed,one.mi)
+--~ else
+--~ return traced_apply(list,parsed,nofparsed,one.mi)
+--~ end
+--~ end
+
local function applylpath(list,pattern)
- -- we avoid an extra call
+ if not list then
+ return
+ end
local parsed = cache[pattern]
if parsed then
lpathcalls = lpathcalls + 1
@@ -1017,27 +1050,32 @@ local function applylpath(list,pattern)
if nofparsed == 0 then
return -- something is wrong
end
- local one = list[1] -- we could have a third argument: isroot and list or list[1] or whatever we like ... todo
- if not one then
- return -- something is wrong
- elseif not trace_lpath then
- return normal_apply(list,parsed,nofparsed,one.mi)
+ if not trace_lpath then
+ return normal_apply ({ list },parsed,nofparsed,list.mi)
elseif trace_lprofile then
- return profiled_apply(list,parsed,nofparsed,one.mi)
+ return profiled_apply({ list },parsed,nofparsed,list.mi)
else
- return traced_apply(list,parsed,nofparsed,one.mi)
+ return traced_apply ({ list },parsed,nofparsed,list.mi)
end
end
xml.applylpath = applylpath -- takes a table as first argment, which is what xml.filter will do
+--[[ldx--
+<p>This is the main filter function. It returns whatever is asked for.</p>
+--ldx]]--
+
+function xml.filter(root,pattern) -- no longer funny attribute handling here
+ return applylpath(root,pattern)
+end
+
-- internal (parsed)
expressions.child = function(e,pattern)
- return applylpath({ e },pattern) -- todo: cache
+ return applylpath(e,pattern) -- todo: cache
end
expressions.count = function(e,pattern)
- local collected = applylpath({ e },pattern) -- todo: cache
+ local collected = applylpath(e,pattern) -- todo: cache
return (collected and #collected) or 0
end
@@ -1077,7 +1115,7 @@ expressions.boolean = toboolean
local function traverse(root,pattern,handle)
report_lpath("use 'xml.selection' instead for '%s'",pattern)
- local collected = applylpath({ root },pattern)
+ local collected = applylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -1088,7 +1126,7 @@ local function traverse(root,pattern,handle)
end
local function selection(root,pattern,handle)
- local collected = applylpath({ root },pattern)
+ local collected = applylpath(root,pattern)
if collected then
if handle then
for c=1,#collected do
@@ -1216,14 +1254,6 @@ expressions.tag = function(e,n) -- only tg
end
--[[ldx--
-<p>This is the main filter function. It returns whatever is asked for.</p>
---ldx]]--
-
-function xml.filter(root,pattern) -- no longer funny attribute handling here
- return applylpath({ root },pattern)
-end
-
---[[ldx--
<p>Often using an iterators looks nicer in the code than passing handler
functions. The <l n='lua'/> book describes how to use coroutines for that
purpose (<url href='http://www.lua.org/pil/9.3.html'/>). This permits
@@ -1242,7 +1272,7 @@ end
local wrap, yield = coroutine.wrap, coroutine.yield
function xml.elements(root,pattern,reverse) -- r, d, k
- local collected = applylpath({ root },pattern)
+ local collected = applylpath(root,pattern)
if collected then
if reverse then
return wrap(function() for c=#collected,1,-1 do
@@ -1258,7 +1288,7 @@ function xml.elements(root,pattern,reverse) -- r, d, k
end
function xml.collected(root,pattern,reverse) -- e
- local collected = applylpath({ root },pattern)
+ local collected = applylpath(root,pattern)
if collected then
if reverse then
return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end)
diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua
index 270c83e5a..6a85a4853 100644
--- a/tex/context/base/lxml-tab.lua
+++ b/tex/context/base/lxml-tab.lua
@@ -898,7 +898,7 @@ local function serialize(e,handlers,...)
-- elseif type(e) == "string" then
-- functions["@tx@"](e,handlers)
else
- functions["@dc@"](e,handlers)
+ functions["@dc@"](e,handlers) -- dc ?
end
if finalize then
return finalize()
@@ -936,6 +936,7 @@ local function newhandlers(settings)
handlers[settings.name] = t
end
end
+ utilities.storage.mark(t)
return t
end
diff --git a/tex/context/base/lxml-tex.lua b/tex/context/base/lxml-tex.lua
index 19614770e..a0a34cb0d 100644
--- a/tex/context/base/lxml-tex.lua
+++ b/tex/context/base/lxml-tex.lua
@@ -9,29 +9,18 @@ if not modules then modules = { } end modules ['lxml-tst'] = {
local utf = unicode.utf8
local utfchar = utf.char
-local concat, insert, remove, gsub, find = table.concat, table.insert, table.remove
+local concat, insert, remove = table.concat, table.insert, table.remove
local format, sub, gsub, find, gmatch, match = string.format, string.sub, string.gsub, string.find, string.gmatch, string.match
local type, next, tonumber, tostring = type, next, tonumber, tostring
local lpegmatch = lpeg.match
local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc
local tex, xml = tex, xml
+local lowerchars, upperchars, lettered = characters.lower, characters.upper, characters.lettered
lxml = lxml or { }
local lxml = lxml
-if not tex and not tex.sprint then -- no longer needed
- tex = {
- sprint = function(catcodes,...) texio.write(table.concat{...}) end,
- print = function(catcodes,...) texio.write(table.concat{...}) end,
- write = function( ...) texio.write(table.concat{...}) end,
- }
- commands = {
- writestatus = logs.report
- }
- resolvers.loadbinfile = function(filename) return true, io.loaddata(filename) end
-end
-
local texsprint, texprint, texwrite = tex.sprint, tex.print, tex.write
local texcatcodes, ctxcatcodes, vrbcatcodes, notcatcodes = tex.texcatcodes, tex.ctxcatcodes, tex.vrbcatcodes, tex.notcatcodes
@@ -286,14 +275,14 @@ lxml.addindex = addindex
-- another cache
local function lxmlapplylpath(id,pattern) -- better inline, saves call
- return xmlapplylpath({ getid(id) }, pattern)
+ return xmlapplylpath(getid(id),pattern)
end
lxml.filter = lxmlapplylpath
function lxml.filterlist(list,pattern)
for s in gmatch(list,"[^, ]+") do -- we could cache a table
- xmlapplylpath({ getid(s) }, pattern)
+ xmlapplylpath(getid(s),pattern)
end
end
@@ -369,8 +358,10 @@ function lxml.load(id,filename,compress,entities)
end
noffiles, nofconverted = noffiles + 1, nofconverted + 1
-- local xmltable = xml.load(filename)
+ starttiming(xml)
local ok, data = resolvers.loadbinfile(filename)
local xmltable = lxml.convert(id,(ok and data) or "",compress,entities)
+ stoptiming(xml)
lxml.store(id,xmltable,filename)
return xmltable, filename
end
@@ -554,7 +545,7 @@ function lxml.serialize(root)
end
function lxml.setaction(id,pattern,action)
- local collected = xmlapplylpath({ getid(id) }, pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
if collected then
for c=1,#collected do
collected[c].command = action
@@ -760,7 +751,7 @@ end
function lxml.setsetup(id,pattern,setup)
if not setup or setup == "" or setup == "*" or setup == "-" or setup == "+" then
- local collected = xmlapplylpath({ getid(id) }, pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
if collected then
if trace_setups then
for c=1, #collected do
@@ -803,7 +794,7 @@ function lxml.setsetup(id,pattern,setup)
else
local a, b = match(setup,"^(.+:)([%*%-])$")
if a and b then
- local collected = xmlapplylpath({ getid(id) }, pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
if collected then
if trace_setups then
for c=1, #collected do
@@ -848,7 +839,7 @@ function lxml.setsetup(id,pattern,setup)
report_lxml("no lpath matches for %s",pattern)
end
else
- local collected = xmlapplylpath({ getid(id) }, pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
if collected then
if trace_setups then
for c=1, #collected do
@@ -996,6 +987,14 @@ local function text(collected)
end
end
+local function stripped(collected)
+ if collected then
+ for c=1,#collected do
+ cprint(xml.stripelement(collected[c]))
+ end
+ end
+end
+
local function ctxtext(collected)
if collected then
for c=1,#collected do
@@ -1004,8 +1003,6 @@ local function ctxtext(collected)
end
end
-local lowerchars, upperchars = characters.lower, characters.upper
-
local function lower(collected)
if collected then
for c=1,#collected do
@@ -1068,6 +1065,7 @@ finalizers.count = count
finalizers.command = command
finalizers.attribute = attribute
finalizers.text = text
+finalizers.stripped = stripped
finalizers.lower = lower
finalizers.upper = upper
finalizers.ctxtext = ctxtext
@@ -1154,21 +1152,21 @@ lxml.verbatim = verbatim
-- helpers
function lxml.first(id,pattern)
- local collected = xmlapplylpath({ getid(id) }, pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
if collected then
first(collected)
end
end
function lxml.last(id,pattern)
- local collected = xmlapplylpath({ getid(id) }, pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
if collected then
last(collected)
end
end
function lxml.all(id,pattern)
- local collected = xmlapplylpath({ getid(id) }, pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
if collected then
all(collected)
end
@@ -1176,18 +1174,18 @@ end
function lxml.count(id,pattern)
-- always needs to produce a result so no test here
- count(xmlapplylpath({ getid(id) }, pattern))
+ count(xmlapplylpath(getid(id),pattern))
end
function lxml.attribute(id,pattern,a,default)
- local collected = xmlapplylpath({ getid(id) }, pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
if collected then
attribute(collected,a,default)
end
end
function lxml.raw(id,pattern) -- the content, untouched by commands
- local collected = (pattern and xmlapplylpath({ getid(id) }, pattern)) or getid(id)
+ local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id)
if collected then
texsprint(xmltostring(collected[1].dt))
end
@@ -1199,7 +1197,7 @@ function lxml.context(id,pattern) -- the content, untouched by commands
-- texsprint(ctxcatcodes,collected.dt[1])
ctx_text(collected.dt[1])
else
- local collected = xmlapplylpath({ getid(id) }, pattern) or getid(id)
+ local collected = xmlapplylpath(getid(id),pattern) or getid(id)
if collected and #collected > 0 then
texsprint(ctxcatcodes,collected[1].dt)
end
@@ -1207,7 +1205,7 @@ function lxml.context(id,pattern) -- the content, untouched by commands
end
function lxml.text(id,pattern)
- local collected = (pattern and xmlapplylpath({ getid(id) }, pattern)) or getid(id)
+ local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id)
if collected then
text(collected)
end
@@ -1216,29 +1214,29 @@ end
lxml.content = text
function lxml.position(id,pattern,n)
- local collected = xmlapplylpath({ getid(id) }, pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
if collected then
position(collected,n)
end
end
function lxml.chainattribute(id,pattern,a,default)
- local collected = xmlapplylpath({ getid(id) }, pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
if collected then
chainattribute(collected,a,default)
end
end
function lxml.concatrange(id,pattern,start,stop,separator,lastseparator,textonly) -- test this on mml
- concatrange(xmlapplylpath({ getid(id) }, pattern),start,stop,separator,lastseparator,textonly)
+ concatrange(xmlapplylpath(getid(id),pattern),start,stop,separator,lastseparator,textonly)
end
function lxml.concat(id,pattern,separator,lastseparator,textonly)
- concatrange(xmlapplylpath({ getid(id) }, pattern),false,false,separator,lastseparator,textonly)
+ concatrange(xmlapplylpath(getid(id),pattern),false,false,separator,lastseparator,textonly)
end
function lxml.element(id,n)
- position(xmlapplylpath({ getid(id) },"/*"),n)
+ position(xmlapplylpath(getid(id),"/*"),n)
end
lxml.index = lxml.position
@@ -1308,7 +1306,7 @@ end
function lxml.command(id,pattern,cmd)
local i, p = getid(id,true)
- local collected = xmlapplylpath({ getid(i) }, pattern)
+ local collected = xmlapplylpath(getid(i),pattern)
if collected then
local rootname = p or i.name
for c=1,#collected do
@@ -1421,3 +1419,21 @@ end
lxml.obsolete = { }
lxml.get_id = getid lxml.obsolete.get_id = getid
+
+-- goodies:
+
+function xml.finalizers.tex.lettered(collected)
+ if collected then
+ for c=1,#collected do
+ texsprint(ctxcatcodes,lettered(collected[1].dt[1]))
+ end
+ end
+end
+
+--~ function xml.finalizers.tex.apply(collected,what) -- to be tested
+--~ if collected then
+--~ for c=1,#collected do
+--~ texsprint(ctxcatcodes,what(collected[1].dt[1]))
+--~ end
+--~ end
+--~ end
diff --git a/tex/context/base/m-punk.mkiv b/tex/context/base/m-punk.mkiv
index 7d8815c4b..46225d4ea 100644
--- a/tex/context/base/m-punk.mkiv
+++ b/tex/context/base/m-punk.mkiv
@@ -95,7 +95,7 @@ function metapost.characters.process(mpxformat, name, instances, scalefactor)
if not lists then
statistics.starttiming(flusher)
-- we can use a format per font
- local data = io.loaddata(resolvers.find_file(name))
+ local data = io.loaddata(resolvers.findfile(name))
metapost.reset(mpxformat)
metapost.setoutercolor(2) -- no outer color and no reset either
lists = { }
@@ -163,16 +163,16 @@ function fonts.vf.aux.combine.commands.metafont(g,v)
g.variants = list
end
-fonts.define.methods.install( "punk", {
+fonts.definers.methods.install( "punk", {
{ "metafont", "mfplain", "punkfont.mp", 10 },
} )
-fonts.define.methods.install( "punkbold", {
+fonts.definers.methods.install( "punkbold", {
{ "metafont", "mfplain", "punkfont-bold.mp", 10 },
} )
-fonts.define.methods.install( "punkslanted", {
+fonts.definers.methods.install( "punkslanted", {
{ "metafont", "mfplain", "punkfont-slanted.mp", 10 },
} )
-fonts.define.methods.install( "punkboldslanted", {
+fonts.definers.methods.install( "punkboldslanted", {
{ "metafont", "mfplain", "punkfont-boldslanted.mp", 10 },
} )
diff --git a/tex/context/base/math-ent.lua b/tex/context/base/math-ent.lua
index 53a68ab0f..0eaa02939 100644
--- a/tex/context/base/math-ent.lua
+++ b/tex/context/base/math-ent.lua
@@ -7,7 +7,7 @@ if not modules then modules = { } end modules ['math-ent'] = {
-- this might go into char-def
-mathematics.xml.entities={
+mathematics.xml.entities = utilities.storage.allocate {
["Aacute"]=0x000C1,
["aacute"]=0x000E1,
["Abreve"]=0x00102,
diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua
index 9efc8ce64..838ed0e80 100644
--- a/tex/context/base/math-ini.lua
+++ b/tex/context/base/math-ini.lua
@@ -13,6 +13,8 @@ local utf = unicode.utf8
local texsprint, format, utfchar, utfbyte = tex.sprint, string.format, utf.char, utf.byte
+local allocate = utilities.storage.allocate
+
local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
local report_math = logs.new("mathematics")
@@ -23,11 +25,11 @@ local mathematics = mathematics
mathematics.extrabase = 0xFE000 -- here we push some virtuals
mathematics.privatebase = 0xFF000 -- here we push the ex
-local families = {
+local families = allocate {
tf = 0, it = 1, sl = 2, bf = 3, bi = 4, bs = 5, -- virtual fonts or unicode otf
}
-local classes = {
+local classes = allocate {
ord = 0, -- mathordcomm mathord
op = 1, -- mathopcomm mathop
bin = 2, -- mathbincomm mathbin
diff --git a/tex/context/base/math-ini.mkiv b/tex/context/base/math-ini.mkiv
index 203d88156..74f40543d 100644
--- a/tex/context/base/math-ini.mkiv
+++ b/tex/context/base/math-ini.mkiv
@@ -409,6 +409,10 @@
\stopextendcatcodetable
\to \everydonknuthmode
+%D Even more drastic:
+
+\def\asciimode{\catcodetable\txtcatcodes\nonknuthmode}
+
%D Needed for unicode:
\def\nulloperator{\mathortext{\mathop{\null}}{\null}}
diff --git a/tex/context/base/math-map.lua b/tex/context/base/math-map.lua
index 0a3611cca..17841bde6 100644
--- a/tex/context/base/math-map.lua
+++ b/tex/context/base/math-map.lua
@@ -22,6 +22,8 @@ if not modules then modules = { } end modules ['math-map'] = {
local type, next = type, next
local floor, div = math.floor, math.div
+local allocate = utilities.storage.allocate
+
local texattribute = tex.attribute
local trace_greek = false trackers.register("math.greek", function(v) trace_greek = v end)
@@ -35,7 +37,7 @@ local mathematics = mathematics
-- following approach permits easier remapping of a-a, A-Z and 0-9 to
-- fallbacks; symbols is currently mostly greek
-mathematics.alphabets = {
+mathematics.alphabets = allocate {
regular = {
tf = {
digits = 0x00030,
diff --git a/tex/context/base/math-noa.lua b/tex/context/base/math-noa.lua
index 50052c65c..c22b1272f 100644
--- a/tex/context/base/math-noa.lua
+++ b/tex/context/base/math-noa.lua
@@ -131,7 +131,7 @@ local function report_remap(tag,id,old,new,extra)
end
local remapalphabets = mathematics.remapalphabets
-local fcs = fonts.color.set
+local fcs = fonts.colors.set
-- we can have a global famdata == fonts.famdata and chrdata == fonts.chrdata
diff --git a/tex/context/base/math-vfu.lua b/tex/context/base/math-vfu.lua
index 9213ab6ee..183eefc2a 100644
--- a/tex/context/base/math-vfu.lua
+++ b/tex/context/base/math-vfu.lua
@@ -20,7 +20,9 @@ local report_virtual = logs.new("virtual math")
local fonts, nodes, mathematics = fonts, nodes, mathematics
-fonts.enc.math = fonts.enc.math or { }
+local mathencodings = utilities.storage.allocate { }
+
+fonts.enc.math = mathencodings -- better is then: fonts.enc.vectors
local shared = { }
@@ -371,7 +373,7 @@ local reverse -- index -> unicode
function fonts.vf.math.define(specification,set)
if not reverse then
reverse = { }
- for k, v in next, fonts.enc.math do
+ for k, v in next, mathencodings do
local r = { }
for u, i in next, v do
r[i] = u
@@ -394,7 +396,7 @@ function fonts.vf.math.define(specification,set)
else
if ss.features then ssname = ssname .. "*" .. ss.features end
if ss.main then main = s end
- local f, id = fonts.tfm.read_and_define(ssname,size)
+ local f, id = fonts.tfm.readanddefine(ssname,size)
if not f then
report_virtual("loading font %s subfont %s with name %s at %s is skipped, not found",name,s,ssname,size)
else
@@ -408,7 +410,7 @@ function fonts.vf.math.define(specification,set)
end
if not ss.checked then
ss.checked = true
- local vector = fonts.enc.math[ss.vector]
+ local vector = mathencodings[ss.vector]
if vector then
-- we resolve named glyphs only once as we can assume that vectors
-- are unique to a font set (when we read an afm we get those names
@@ -485,7 +487,7 @@ function fonts.vf.math.define(specification,set)
local vectorname = ss.vector
if vectorname then
local offset = 0xFF000
- local vector = fonts.enc.math[vectorname]
+ local vector = mathencodings[vectorname]
local rotcev = reverse[vectorname]
if vector then
local fc, fd, si = fs.characters, fs.descriptions, shared[s]
@@ -555,7 +557,7 @@ function fonts.vf.math.define(specification,set)
end
if ss.extension then
-- todo: if multiple ex, then 256 offsets per instance
- local extension = fonts.enc.math["large-to-small"]
+ local extension = mathencodings["large-to-small"]
local variants_done = fs.variants_done
for index, fci in next, fc do -- the raw ex file
if type(index) == "number" then
@@ -691,14 +693,14 @@ function fonts.vf.math.define(specification,set)
end
function mathematics.makefont(name, set)
- fonts.define.methods[name] = function(specification)
+ fonts.definers.methods.variants[name] = function(specification)
return fonts.vf.math.define(specification,set)
end
end
-- varphi is part of the alphabet, contrary to the other var*s'
-fonts.enc.math["large-to-small"] = {
+mathencodings["large-to-small"] = {
[0x00028] = 0x00, -- (
[0x00029] = 0x01, -- )
[0x0005B] = 0x02, -- [
@@ -742,7 +744,7 @@ fonts.enc.math["large-to-small"] = {
[0x02044] = 0x0E, -- /
}
-fonts.enc.math["tex-ex"] = {
+mathencodings["tex-ex"] = {
[0x0220F] = 0x51, -- prod
[0x0222B] = 0x52, -- intop
[0x02210] = 0x60, -- coprod
@@ -762,7 +764,7 @@ fonts.enc.math["tex-ex"] = {
-- only math stuff is needed, since we always use an lm or gyre
-- font as main font
-fonts.enc.math["tex-mr"] = {
+mathencodings["tex-mr"] = {
[0x00393] = 0x00, -- Gamma
[0x00394] = 0x01, -- Delta
[0x00398] = 0x02, -- Theta
@@ -817,11 +819,11 @@ fonts.enc.math["tex-mr"] = {
-- [0x000A8] = 0x7F, -- [math]ddot
}
-fonts.enc.math["tex-mr-missing"] = {
+mathencodings["tex-mr-missing"] = {
[0x02236] = 0x3A, -- colon
}
-fonts.enc.math["tex-mi"] = {
+mathencodings["tex-mi"] = {
[0x1D6E4] = 0x00, -- Gamma
[0x1D6E5] = 0x01, -- Delta
[0x1D6E9] = 0x02, -- Theta
@@ -906,7 +908,7 @@ fonts.enc.math["tex-mi"] = {
}
-fonts.enc.math["tex-it"] = {
+mathencodings["tex-it"] = {
-- [0x1D434] = 0x41, -- A
[0x1D6E2] = 0x41, -- Alpha
-- [0x1D435] = 0x42, -- B
@@ -976,14 +978,14 @@ fonts.enc.math["tex-it"] = {
-- [0x1D467] = 0x7A, -- z
}
-fonts.enc.math["tex-ss"] = { }
-fonts.enc.math["tex-tt"] = { }
-fonts.enc.math["tex-bf"] = { }
-fonts.enc.math["tex-bi"] = { }
-fonts.enc.math["tex-fraktur"] = { }
-fonts.enc.math["tex-fraktur-bold"] = { }
+mathencodings["tex-ss"] = { }
+mathencodings["tex-tt"] = { }
+mathencodings["tex-bf"] = { }
+mathencodings["tex-bi"] = { }
+mathencodings["tex-fraktur"] = { }
+mathencodings["tex-fraktur-bold"] = { }
-function fonts.vf.math.set_letters(font_encoding, name, uppercase, lowercase)
+function fonts.vf.math.setletters(font_encoding, name, uppercase, lowercase)
local enc = font_encoding[name]
for i = 0,25 do
enc[uppercase+i] = i + 0x41
@@ -991,14 +993,14 @@ function fonts.vf.math.set_letters(font_encoding, name, uppercase, lowercase)
end
end
-function fonts.vf.math.set_digits(font_encoding, name, digits)
+function fonts.vf.math.setdigits(font_encoding, name, digits)
local enc = font_encoding[name]
for i = 0,9 do
enc[digits+i] = i + 0x30
end
end
-fonts.enc.math["tex-sy"] = {
+mathencodings["tex-sy"] = {
[0x0002D] = 0x00, -- -
[0x02212] = 0x00, -- -
-- [0x02201] = 0x00, -- complement
@@ -1147,7 +1149,7 @@ fonts.enc.math["tex-sy"] = {
-- column, while in the second column we show the tex/ams names. As usual
-- it costs hours to figure out such a table.
-fonts.enc.math["tex-ma"] = {
+mathencodings["tex-ma"] = {
[0x022A1] = 0x00, -- squaredot \boxdot
[0x0229E] = 0x01, -- squareplus \boxplus
[0x022A0] = 0x02, -- squaremultiply \boxtimes
@@ -1281,7 +1283,7 @@ fonts.enc.math["tex-ma"] = {
[0x0229D] = 0x7F, -- circleminus \circleddash
}
-fonts.enc.math["tex-mb"] = {
+mathencodings["tex-mb"] = {
-- [0x0] = 0x00, -- lessornotequal \lvertneqq
-- [0x0] = 0x01, -- greaterornotequal \gvertneqq
[0x02270] = 0x02, -- notlessequal \nleq
@@ -1407,12 +1409,12 @@ fonts.enc.math["tex-mb"] = {
[0x003F6] = 0x7F, -- epsiloninv \backepsilon
}
-fonts.enc.math["tex-mc"] = {
+mathencodings["tex-mc"] = {
-- this file has no tfm so it gets mapped in the private space
[0xFE324] = "mapsfromchar",
}
-fonts.enc.math["tex-fraktur"] = {
+mathencodings["tex-fraktur"] = {
-- [0x1D504] = 0x41, -- A (fraktur A)
-- [0x1D505] = 0x42, -- B
[0x0212D] = 0x43, -- C
@@ -1469,19 +1471,19 @@ fonts.enc.math["tex-fraktur"] = {
-- now that all other vectors are defined ...
-fonts.vf.math.set_letters(fonts.enc.math, "tex-it", 0x1D434, 0x1D44E)
-fonts.vf.math.set_letters(fonts.enc.math, "tex-ss", 0x1D5A0, 0x1D5BA)
-fonts.vf.math.set_letters(fonts.enc.math, "tex-tt", 0x1D670, 0x1D68A)
-fonts.vf.math.set_letters(fonts.enc.math, "tex-bf", 0x1D400, 0x1D41A)
-fonts.vf.math.set_letters(fonts.enc.math, "tex-bi", 0x1D468, 0x1D482)
-fonts.vf.math.set_letters(fonts.enc.math, "tex-fraktur", 0x1D504, 0x1D51E)
-fonts.vf.math.set_letters(fonts.enc.math, "tex-fraktur-bold", 0x1D56C, 0x1D586)
+fonts.vf.math.setletters(mathencodings, "tex-it", 0x1D434, 0x1D44E)
+fonts.vf.math.setletters(mathencodings, "tex-ss", 0x1D5A0, 0x1D5BA)
+fonts.vf.math.setletters(mathencodings, "tex-tt", 0x1D670, 0x1D68A)
+fonts.vf.math.setletters(mathencodings, "tex-bf", 0x1D400, 0x1D41A)
+fonts.vf.math.setletters(mathencodings, "tex-bi", 0x1D468, 0x1D482)
+fonts.vf.math.setletters(mathencodings, "tex-fraktur", 0x1D504, 0x1D51E)
+fonts.vf.math.setletters(mathencodings, "tex-fraktur-bold", 0x1D56C, 0x1D586)
-fonts.vf.math.set_digits (fonts.enc.math, "tex-ss", 0x1D7E2)
-fonts.vf.math.set_digits (fonts.enc.math, "tex-tt", 0x1D7F6)
-fonts.vf.math.set_digits (fonts.enc.math, "tex-bf", 0x1D7CE)
+fonts.vf.math.setdigits (mathencodings, "tex-ss", 0x1D7E2)
+fonts.vf.math.setdigits (mathencodings, "tex-tt", 0x1D7F6)
+fonts.vf.math.setdigits (mathencodings, "tex-bf", 0x1D7CE)
--- fonts.vf.math.set_digits (fonts.enc.math, "tex-bi", 0x1D7CE)
+-- fonts.vf.math.setdigits (mathencodings, "tex-bi", 0x1D7CE)
-- todo: add ss, tt, bf etc vectors
-- todo: we can make ss tt etc an option
diff --git a/tex/context/base/meta-ini.mkiv b/tex/context/base/meta-ini.mkiv
index adc2e5aac..c23220250 100644
--- a/tex/context/base/meta-ini.mkiv
+++ b/tex/context/base/meta-ini.mkiv
@@ -163,13 +163,13 @@
\unexpanded\long\def\processMPgraphic#1% todo: extensions and inclusions outside beginfig
{\dostartcurrentMPgraphic
\forgetall
- \setbox\MPgraphicbox\hbox\bgroup
+ \setbox\MPgraphicbox\hbox\bgroup % ; added 20100901 (as in mkii)
\normalexpanded{\noexpand\ctxlua{metapost.graphic(
"\currentMPgraphicinstance",
"\currentMPgraphicformat",
- \!!bs#1\!!es,
- \!!bs\currentMPinitializations\!!es,
- \!!bs\currentMPpreamble\!!es,
+ \!!bs#1;\!!es,
+ \!!bs\currentMPinitializations;\!!es,
+ \!!bs\currentMPpreamble;\!!es,
\MPaskedfigure
)}}%
\egroup
diff --git a/tex/context/base/meta-pdf.lua b/tex/context/base/meta-pdf.lua
index 0deda627f..4b4126559 100644
--- a/tex/context/base/meta-pdf.lua
+++ b/tex/context/base/meta-pdf.lua
@@ -29,7 +29,7 @@ local pdffinishtransparencycode = lpdf.finishtransparencycode
metapost.mptopdf = metapost.mptopdf or { }
local mptopdf = metapost.mptopdf
-mptopdf.n = 0
+mptopdf.nofconverted = 0
local m_path, m_stack, m_texts, m_version, m_date, m_shortcuts = { }, { }, { }, 0, 0, false
@@ -118,8 +118,7 @@ end
-- mp interface
-metapost.mps = metapost.mps or { }
-local mps = metapost.mps or { }
+local mps = { }
function mps.creator(a, b, c)
m_version = tonumber(b)
@@ -533,8 +532,8 @@ function mptopdf.convertmpstopdf(name)
if ok then
mps.colormodel = tex.attribute[a_colorspace]
statistics.starttiming(mptopdf)
- mptopdf.n = mptopdf.n + 1
- pdfcode(format("\\letterpercent\\space mptopdf begin: n=%s, file=%s",mptopdf.n,file.basename(name)))
+ mptopdf.nofconverted = mptopdf.nofconverted + 1
+ pdfcode(format("\\letterpercent\\space mptopdf begin: n=%s, file=%s",mptopdf.nofconverted,file.basename(name)))
pdfcode("q 1 0 0 1 0 0 cm")
parse(m_data)
pdfcode(pdffinishtransparencycode())
@@ -551,7 +550,7 @@ end
-- status info
statistics.register("mps conversion time",function()
- local n = mptopdf.n
+ local n = mptopdf.nofconverted
if n > 0 then
return format("%s seconds, %s conversions", statistics.elapsedtime(mptopdf),n)
else
diff --git a/tex/context/base/meta-pdh.lua b/tex/context/base/meta-pdh.lua
index 88c050ffb..117300f80 100644
--- a/tex/context/base/meta-pdh.lua
+++ b/tex/context/base/meta-pdh.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['meta-pdf'] = {
license = "see context related readme files"
}
+os.exit()
+
-- This file contains the history of the converter. We keep it around as it
-- relates to the development of luatex.
@@ -32,9 +34,9 @@ local metapost = metapost
metapost.mptopdf = metapost.mptopdf or { }
local mptopdf = metapost.mptopdf
-mptopdf.parsers = { }
-mptopdf.parser = 'none'
-mptopdf.n = 0
+mptopdf.parsers = { }
+mptopdf.parser = 'none'
+mptopdf.nofconverted = 0
function mptopdf.reset()
mptopdf.data = ""
@@ -221,7 +223,7 @@ end
function mptopdf.convertmpstopdf(name)
if mptopdf.loaded(name) then
- mptopdf.n = mptopdf.n + 1
+ mptopdf.nofconverted = mptopdf.nofconverted + 1
statistics.starttiming(mptopdf)
mptopdf.parse()
mptopdf.reset()
@@ -598,7 +600,7 @@ mptopdf.parser = 'lpeg'
-- status info
statistics.register("mps conversion time",function()
- local n = mptopdf.n
+ local n = mptopdf.nofconverted
if n > 0 then
return format("%s seconds, %s conversions", statistics.elapsedtime(mptopdf),n)
else
diff --git a/tex/context/base/mlib-ctx.lua b/tex/context/base/mlib-ctx.lua
index 8de6a4df6..00d043ad1 100644
--- a/tex/context/base/mlib-ctx.lua
+++ b/tex/context/base/mlib-ctx.lua
@@ -76,11 +76,11 @@ end
statistics.register("metapost processing time", function()
local n = metapost.n
if n > 0 then
- local e, t = metapost.externals.n, statistics.elapsedtime
+ local e, t = metapost.makempy.nofconverted, statistics.elapsedtime
local str = format("%s seconds, loading: %s seconds, execution: %s seconds, n: %s",
t(metapost), t(mplib), t(metapost.exectime), n)
if e > 0 then
- return format("%s, external: %s seconds (%s calls)", str, t(metapost.externals), e)
+ return format("%s, external: %s seconds (%s calls)", str, t(metapost.makempy), e)
else
return str
end
diff --git a/tex/context/base/mlib-pdf.lua b/tex/context/base/mlib-pdf.lua
index a4f7f3137..44d2367a1 100644
--- a/tex/context/base/mlib-pdf.lua
+++ b/tex/context/base/mlib-pdf.lua
@@ -10,6 +10,8 @@ local format, concat, gsub = string.format, table.concat, string.gsub
local texsprint = tex.sprint
local abs, sqrt, round = math.abs, math.sqrt, math.round
+local allocate = utilities.storage.allocate
+
local report_mplib = logs.new("mplib")
local mplib = mplib
@@ -24,6 +26,7 @@ local metapost = metapost
metapost.multipass = false
metapost.n = 0
metapost.optimize = true -- false
+metapost.specials = allocate()
--~ Because in MKiV we always have two passes, we save the objects. When an extra
--~ mp run is done (due to for instance texts identifier in the parse pass), we
@@ -225,8 +228,6 @@ end
metapost.flushnormalpath = flushnormalpath
-metapost.specials = metapost.specials or { }
-
-- we have two extension handlers, one for pre and postscripts, and one for colors
-- the flusher is pdf based, if another backend is used, we need to overload the
diff --git a/tex/context/base/mlib-pps.lua b/tex/context/base/mlib-pps.lua
index d930b8f9b..bcf94249f 100644
--- a/tex/context/base/mlib-pps.lua
+++ b/tex/context/base/mlib-pps.lua
@@ -17,6 +17,7 @@ local tonumber, type = tonumber, type
local lpegmatch = lpeg.match
local texbox = tex.box
local copy_list = node.copy_list
+local free_list = node.flush_list
local P, S, V, Cs = lpeg.P, lpeg.S, lpeg.V, lpeg.Cs
@@ -37,17 +38,14 @@ local cmyktogray = colors.cmyktogray or function() return 0 end
local mplib, lpdf = mplib, lpdf
-metapost = metapost or { }
local metapost = metapost
-
-metapost.specials = metapost.specials or { }
local specials = metapost.specials
specials.data = specials.data or { }
local data = specials.data
-metapost.externals = metapost.externals or { n = 0 }
-local externals = metapost.externals
+metapost.makempy = metapost.makempy or { nofconverted = 0 }
+local makempy = metapost.makempy
local colordata = { {}, {}, {}, {}, {} }
@@ -384,24 +382,28 @@ end
local current_format, current_graphic, current_initializations
--- metapost.first_box = metapost.first_box or 1000
--- metapost.last_box = metapost.last_box or 1100
---~ metapost.textext_current = metapost.first_box
-metapost.multipass = false
+metapost.multipass = false
-local textexts = { }
+local textexts = { }
+local scratchbox = 0
-local function free_boxes() -- todo: mp direct list ipv box
+local function freeboxes() -- todo: mp direct list ipv box
for n, box in next, textexts do
local tn = textexts[n]
if tn then
- -- somehow not flushed (used)
- textexts[n] = nil
+ free_list(tn)
+ -- texbox[scratchbox] = tn
+ -- texbox[scratchbox] = nil -- this frees too
+ if trace_textexts then
+ report_mplib("freeing textext %s",n)
+ end
end
end
textexts = { }
end
+metapost.resettextexts = freeboxes
+
function metapost.settext(box,slot)
textexts[slot] = copy_list(texbox[box])
texbox[box] = nil
@@ -412,35 +414,44 @@ end
function metapost.gettext(box,slot)
texbox[box] = copy_list(textexts[slot])
--- textexts[slot] = nil -- no, pictures can be placed several times
+ if trace_textexts then
+ report_mplib("putting textext %s in box %s",slot,box)
+ end
+ -- textexts[slot] = nil -- no, pictures can be placed several times
end
function specials.tf(specification,object)
---~ print("setting", metapost.textext_current)
local n, str = match(specification,"^(%d+):(.+)$")
if n and str then
n = tonumber(n)
- -- if metapost.textext_current < metapost.last_box then
- -- metapost.textext_current = metapost.first_box + n - 1
- -- end
if trace_textexts then
- -- report_mplib("first pass: order %s, box %s",n,metapost.textext_current)
- report_mplib("first pass: order %s",n)
+ report_mplib("setting textext %s (first pass)",n)
end
- -- sprint(ctxcatcodes,format("\\MPLIBsettext{%s}{%s}",metapost.textext_current,str))
sprint(ctxcatcodes,format("\\MPLIBsettext{%s}{%s}",n,str))
metapost.multipass = true
end
return { }, nil, nil, nil
end
+local factor = 65536*(7227/7200)
+
+function metapost.edefsxsy(wd,ht,dp) -- helper for figure
+ local hd = ht + dp
+ commands.edef("sx",(wd ~= 0 and factor/wd) or 0)
+ commands.edef("sy",(hd ~= 0 and factor/hd) or 0)
+end
+
+local function sxsy(wd,ht,dp) -- helper for text
+ local hd = ht + dp
+ return (wd ~= 0 and factor/wd) or 0, (hd ~= 0 and factor/hd) or 0
+end
+
function specials.ts(specification,object,result,flusher)
- -- print("getting", metapost.textext_current)
local n, str = match(specification,"^(%d+):(.+)$")
if n and str then
n = tonumber(n)
if trace_textexts then
- report_mplib("second pass: order %s",n)
+ report_mplib("processing textext %s (second pass)",n)
end
local op = object.path
local first, second, fourth = op[1], op[2], op[4]
@@ -453,20 +464,11 @@ function specials.ts(specification,object,result,flusher)
object.path = nil
end
local before = function() -- no need for before function (just do it directly)
- --~ flusher.flushfigure(result)
- --~ sprint(ctxcatcodes,format("\\MPLIBgettext{%f}{%f}{%f}{%f}{%f}{%f}{%s}",sx,rx,ry,sy,tx,ty,metapost.textext_current))
- --~ result = { }
result[#result+1] = format("q %f %f %f %f %f %f cm", sx,rx,ry,sy,tx,ty)
flusher.flushfigure(result)
- -- if metapost.textext_current < metapost.last_box then
- -- metapost.textext_current = metapost.first_box + n - 1
- -- end
- -- local b = metapost.textext_current
- -- local box = texbox[b]
local box = textexts[n]
if box then
- -- sprint(ctxcatcodes,format("\\MPLIBgettextscaled{%s}{%s}{%s}",b,metapost.sxsy(box.width,box.height,box.depth)))
- sprint(ctxcatcodes,format("\\MPLIBgettextscaled{%s}{%s}{%s}",n,metapost.sxsy(box.width,box.height,box.depth)))
+ sprint(ctxcatcodes,format("\\MPLIBgettextscaled{%s}{%s}{%s}",n,sxsy(box.width,box.height,box.depth)))
else
-- error
end
@@ -651,13 +653,13 @@ local function ignore(s)
return ""
end
-local parser = P {
- [1] = Cs((V(2)/register + V(4)/ignore + V(3)/convert + V(5)/force + 1)^0),
- [2] = ttex + gtex,
- [3] = btex * spacing * Cs(texmess) * etex,
- [4] = vtex * spacing * Cs(texmess) * etex,
- [5] = multipass, -- experimental, only for testing
-}
+-- local parser = P {
+-- [1] = Cs((V(2)/register + V(4)/ignore + V(3)/convert + V(5)/force + 1)^0),
+-- [2] = ttex + gtex,
+-- [3] = btex * spacing * Cs(texmess) * etex,
+-- [4] = vtex * spacing * Cs(texmess) * etex,
+-- [5] = multipass, -- experimental, only for testing
+-- }
-- currently a a one-liner produces less code
@@ -668,24 +670,11 @@ local parser = Cs((
+ 1
)^0)
-local function check_texts(str)
+local function checktexts(str)
found, forced = false, false
return lpegmatch(parser,str), found, forced
end
-local factor = 65536*(7227/7200)
-
-function metapost.edefsxsy(wd,ht,dp) -- helper for figure
- local hd = ht + dp
- commands.edef("sx",(wd ~= 0 and factor/wd) or 0)
- commands.edef("sy",(hd ~= 0 and factor/hd) or 0)
-end
-
-function metapost.sxsy(wd,ht,dp) -- helper for text
- local hd = ht + dp
- return (wd ~= 0 and factor/wd) or 0, (hd ~= 0 and factor/hd) or 0
-end
-
local no_trial_run = "_trial_run_ := false ;"
local do_trial_run = "if unknown _trial_run_ : boolean _trial_run_ fi ; _trial_run_ := true ;"
local text_data_template = "_tt_w_[%i]:=%f;_tt_h_[%i]:=%f;_tt_d_[%i]:=%f;"
@@ -695,20 +684,17 @@ local do_safeguard = ";"
function metapost.texttextsdata()
local t, n = { }, 0
---~ for i = metapost.first_box, metapost.last_box do
---~ n = n + 1
---~ local box = texbox[i]
for n, box in next, textexts do
- if trace_textexts then
- report_mplib("passed data: order %s",n)
- end
if box then
- t[#t+1] = format(text_data_template,n,box.width/factor,n,box.height/factor,n,box.depth/factor)
+ local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor
+ if trace_textexts then
+ report_mplib("passed textext data %s: (%0.4f,%0.4f,%0.4f)",n,wd,ht,dp)
+ end
+ t[#t+1] = format(text_data_template,n,wd,n,ht,n,dp)
else
break
end
end
---~ print(table.serialize(t))
return t
end
@@ -721,13 +707,12 @@ metapost.method = 1 -- 1:dumb 2:clever
function metapost.graphic_base_pass(mpsformat,str,initializations,preamble,askedfig)
local nofig = (askedfig and "") or false
local done_1, done_2, forced_1, forced_2
- str, done_1, forced_1 = check_texts(str)
+ str, done_1, forced_1 = checktexts(str)
if not preamble or preamble == "" then
preamble, done_2, forced_2 = "", false, false
else
- preamble, done_2, forced_2 = check_texts(preamble)
+ preamble, done_2, forced_2 = checktexts(preamble)
end
- -- metapost.textext_current = metapost.first_box
metapost.intermediate.needed = false
metapost.multipass = false -- no needed here
current_format, current_graphic, current_initializations = mpsformat, str, initializations or ""
@@ -764,13 +749,10 @@ function metapost.graphic_base_pass(mpsformat,str,initializations,preamble,asked
nofig or do_end_fig
}, false, nil, false, false, askedfig )
end
- -- here we could free the textext boxes
- free_boxes()
end
function metapost.graphic_extra_pass(askedfig)
local nofig = (askedfig and "") or false
- -- metapost.textext_current = metapost.first_box
metapost.process(current_format, {
nofig or do_begin_fig,
no_trial_run,
@@ -780,24 +762,17 @@ function metapost.graphic_extra_pass(askedfig)
current_graphic,
nofig or do_end_fig
}, false, nil, false, true, askedfig )
+ sprint(ctxcatcodes,format("\\ctxlua{metapost.resettextexts()}")) -- must happen afterwards
end
-local graphics = { }
local start = [[\starttext]]
local preamble = [[\long\def\MPLIBgraphictext#1{\startTEXpage[scale=10000]#1\stopTEXpage}]]
local stop = [[\stoptext]]
-function specials.gt(specification,object) -- number, so that we can reorder
- graphics[#graphics+1] = format("\\MPLIBgraphictext{%s}",specification)
- metapost.intermediate.needed = true
- metapost.multipass = true
- return { }, nil, nil, nil
-end
-
-function metapost.intermediate.actions.makempy()
+function makempy.processgraphics(graphics)
if #graphics > 0 then
- externals.n = externals.n + 1
- starttiming(externals)
+ makempy.nofconverted = makempy.nofconverted + 1
+ starttiming(makempy)
local mpofile = tex.jobname .. "-mpgraph"
local mpyfile = file.replacesuffix(mpofile,"mpy")
local pdffile = file.replacesuffix(mpofile,"pdf")
@@ -817,7 +792,22 @@ function metapost.intermediate.actions.makempy()
io.savedata(mpyfile,concat(result,""))
end
end
- stoptiming(externals)
+ stoptiming(makempy)
+ end
+end
+
+local graphics = { }
+
+function specials.gt(specification,object) -- number, so that we can reorder
+ graphics[#graphics+1] = format("\\MPLIBgraphictext{%s}",specification)
+ metapost.intermediate.needed = true
+ metapost.multipass = true
+ return { }, nil, nil, nil
+end
+
+function metapost.intermediate.actions.makempy()
+ if #graphics > 0 then
+ makempy.processgraphics(graphics)
graphics = { } -- ?
end
end
diff --git a/tex/context/base/mlib-pps.mkiv b/tex/context/base/mlib-pps.mkiv
index a27eb56df..2eb0ccad4 100644
--- a/tex/context/base/mlib-pps.mkiv
+++ b/tex/context/base/mlib-pps.mkiv
@@ -43,11 +43,7 @@
\def\MPLIBsettext#1% #2%
{\dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox}
-% \def\MPLIBgettextscaled#1#2#3% why a copy
-% {\ctxlua{metapost.gettext(\number\MPtextbox,#1)}% \black has no use here (applied to box)
-% \vbox to \zeropoint{\vss\hbox to \zeropoint{\black\scale[sx=#2,sy=#3]{\raise\dp\MPtextbox\box\MPtextbox}\hss}}}
-
-\def\MPLIBgettextscaled#1#2#3% why a copy
+\def\MPLIBgettextscaled#1#2#3% why a copy .. can be used more often
{\ctxlua{metapost.gettext(\number\MPtextbox,#1)}% we need the colorhack or else the color backend does not sync
\vbox to \zeropoint{\vss\hbox to \zeropoint{\scale[\c!sx=#2,\c!sy=#3]{\raise\dp\MPtextbox\box\MPtextbox}\forcecolorhack\hss}}}
@@ -55,20 +51,3 @@
{\startTEXpage[\c!scale=10000]#1\stopTEXpage}
\protect \endinput
-
-% \def\MPLIBsettext#1% #2% we could as well store in hlists at the lua end i.e. just one box
-% {\global\setbox#1\hbox}
-%
-% \def\MPLIBfreetext#1%
-% {\global\setbox#1\emptybox}
-%
-% \def\MPLIBgettextscaled#1#2#3% why a copy
-% {\vbox to \zeropoint{\vss\hbox to \zeropoint{\black\scale[sx=#2,sy=#3]{\raise\dp#1\copy#1}\hss}}}
-%
-% \def\MPLIBallocate#1%
-% {\newbox\MPLIBfirst
-% \dorecurse{\numexpr#1-1\relax}{\let\MPLIBlast\relax\newbox\MPLIBlast}%
-% \MPLIBregister}
-%
-% \def\MPLIBregister % after allocate!
-% {\ctxlua{metapost.first_box, metapost.last_box = \number\MPLIBfirst, \number\MPLIBlast}}
diff --git a/tex/context/base/mlib-run.lua b/tex/context/base/mlib-run.lua
index c7b24f0f7..25c1cb022 100644
--- a/tex/context/base/mlib-run.lua
+++ b/tex/context/base/mlib-run.lua
@@ -63,7 +63,7 @@ local function finder(name, mode, ftype)
elseif file.is_qualified_path(name) then
return name
else
- return resolvers.find_file(name,ftype)
+ return resolvers.findfile(name,ftype)
end
end
diff --git a/tex/context/base/mult-chk.lua b/tex/context/base/mult-chk.lua
index 46b67e864..5df2efd79 100644
--- a/tex/context/base/mult-chk.lua
+++ b/tex/context/base/mult-chk.lua
@@ -12,9 +12,11 @@ local type = type
local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
local make_settings_to_hash_pattern, settings_to_set = utilities.parsers.make_settings_to_hash_pattern, utilities.parsers.settings_to_set
+local allocate = utilities.storage.allocate
+
interfaces = interfaces or { }
-interfaces.syntax = {
+interfaces.syntax = allocate {
test = { keys = table.tohash { "a","b","c","d","e","f","g" } }
}
diff --git a/tex/context/base/mult-cld.lua b/tex/context/base/mult-cld.lua
index 7244b0d7e..3d3194b69 100644
--- a/tex/context/base/mult-cld.lua
+++ b/tex/context/base/mult-cld.lua
@@ -108,6 +108,16 @@ function context.trace(intercept)
context.trace = function() end
end
+function context.getflush()
+ return flush
+end
+
+function context.setflush(newflush)
+ local oldflush = flush
+ flush = newflush
+ return oldflush
+end
+
trackers.register("context.flush", function(v) if v then context.trace() end end)
trackers.register("context.intercept", function(v) if v then context.trace(true) end end)
@@ -276,3 +286,52 @@ function context.disabletrackers(str) trackers.disable(str) end
-- context.stopchapter(true)
--
-- context.stoptext(true)
+
+--~ Not that useful yet. Maybe something like this when the main loop
+--~ is a coroutine. It also does not help taking care of nested calls.
+--~ Even worse, it interferes with other mechanisms usign context calls.
+--~
+--~ local create, yield, resume = coroutine.create, coroutine.yield, coroutine.resume
+--~ local getflush, setflush = context.getflush, context.setflush
+--~ local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes
+--~
+--~ function context.direct(f)
+--~ local routine = create(f)
+--~ local oldflush = getflush()
+--~ function newflush(...)
+--~ oldflush(...)
+--~ yield(true)
+--~ end
+--~ setflush(newflush)
+--~
+--~ -- local function resumecontext()
+--~ -- local done = resume(routine)
+--~ -- if not done then
+--~ -- return
+--~ -- end
+--~ -- resumecontext() -- stack overflow ... no tail recursion
+--~ -- end
+--~ -- context.resume = resumecontext
+--~ -- texsprint(ctxcatcodes,"\\ctxlua{context.resume()}")
+--~
+--~ local function resumecontext()
+--~ local done = resume(routine)
+--~ if not done then
+--~ return
+--~ end
+--~ -- texsprint(ctxcatcodes,"\\exitloop")
+--~ texsprint(ctxcatcodes,"\\ctxlua{context.resume()}") -- can be simple macro call
+--~ end
+--~ context.resume = resumecontext
+--~ -- texsprint(ctxcatcodes,"\\doloop{\\ctxlua{context.resume()}}") -- can be fast loop at the tex end
+--~ texsprint(ctxcatcodes,"\\ctxlua{context.resume()}")
+--~
+--~ end
+--~
+--~ function something()
+--~ context("\\setbox0")
+--~ context("\\hbox{hans hagen xx}")
+--~ context("\\the\\wd0/\\box0")
+--~ end
+--~
+--~ context.direct(something)
diff --git a/tex/context/base/node-dir.lua b/tex/context/base/node-dir.lua
index f168c7f2b..970313d96 100644
--- a/tex/context/base/node-dir.lua
+++ b/tex/context/base/node-dir.lua
@@ -15,23 +15,25 @@ adapted and now has the mappings as comments. This lua file is
based on that file.
]]--
+local allocate = utilities.storage.allocate
+
local nodes = nodes
-nodes.is_mirrored = {
+nodes.is_mirrored = allocate {
-- TLT = false,
-- TRT = false,
-- LTL = false,
-- RTT = false,
}
-nodes.is_rotated = {
+nodes.is_rotated = allocate {
-- TLT = false,
-- TRT = false,
-- LTL = false,
RTT = true, ["+RTT"] = true,
}
-nodes.textdir_is_parallel = {
+nodes.textdir_is_parallel = allocate {
TLT = {
TLT = true, ["+TLT"] = true,
TRT = true, ["+TRT"] = true,
@@ -58,7 +60,7 @@ nodes.textdir_is_parallel = {
}
}
-nodes.pardir_is_parallel = {
+nodes.pardir_is_parallel = allocate {
TLT = {
TLT = true, ["+TLT"] = true,
TRT = true, ["+TRT"] = true,
@@ -85,7 +87,7 @@ nodes.pardir_is_parallel = {
},
}
-nodes.pardir_is_opposite = {
+nodes.pardir_is_opposite = allocate {
TLT = {
-- TLT = false,
-- TRT = false,
@@ -112,7 +114,7 @@ nodes.pardir_is_opposite = {
},
}
-nodes.textdir_is_opposite = {
+nodes.textdir_is_opposite = allocate {
TLT = {
-- TLT = false,
TRT = true, ["+TRT"] = true,
@@ -139,7 +141,7 @@ nodes.textdir_is_opposite = {
},
}
-nodes.glyphdir_is_opposite = {
+nodes.glyphdir_is_opposite = allocate {
TLT = {
-- TLT = false,
-- TRT = false,
@@ -166,7 +168,7 @@ nodes.glyphdir_is_opposite = {
},
}
-nodes.pardir_is_equal = {
+nodes.pardir_is_equal = allocate {
TLT = {
TLT = true, ["+TLT"] = true,
TRT = true, ["+TRT"] = true,
@@ -193,7 +195,7 @@ nodes.pardir_is_equal = {
},
}
-nodes.textdir_is_equal = {
+nodes.textdir_is_equal = allocate {
TLT = {
TLT = true, ["+TLT"] = true,
-- TRT = false,
@@ -220,7 +222,7 @@ nodes.textdir_is_equal = {
},
}
-nodes.glyphdir_is_equal = {
+nodes.glyphdir_is_equal = allocate {
TLT = {
TLT = true, ["+TLT"] = true,
TRT = true, ["+TRT"] = true,
@@ -247,7 +249,7 @@ nodes.glyphdir_is_equal = {
},
}
-nodes.partextdir_is_equal = {
+nodes.partextdir_is_equal = allocate {
TLT = {
-- TLT = false,
-- TRT = false,
@@ -274,28 +276,28 @@ nodes.partextdir_is_equal = {
},
}
-nodes.textdir_is_is = {
+nodes.textdir_is_is = allocate {
TLT = true, ["+TLT"] = true,
-- TRT = false,
-- LTL = false,
-- RTT = false,
}
-nodes.glyphdir_is_orthogonal = {
+nodes.glyphdir_is_orthogonal = allocate {
TLT = true, ["+TLT"] = true,
TRT = true, ["+TRT"] = true,
LTL = true, ["+LTL"] = true,
-- RTT = false
}
-nodes.dir_is_pop = {
+nodes.dir_is_pop = allocate {
["-TRT"] = true,
["-TLT"] = true,
["-LTL"] = true,
["-RTT"] = true,
}
-nodes.dir_negation = {
+nodes.dir_negation = allocate {
["-TRT"] = "+TRT",
["-TLT"] = "+TLT",
["-LTL"] = "+LTL",
diff --git a/tex/context/base/node-dum.lua b/tex/context/base/node-dum.lua
index 831c0a842..7a454c8c9 100644
--- a/tex/context/base/node-dum.lua
+++ b/tex/context/base/node-dum.lua
@@ -36,7 +36,7 @@ local fontdata = fonts.ids or { }
function nodes.simple_font_handler(head)
-- lang.hyphenate(head)
head = nodes.handlers.characters(head)
- nodes.handlers.injectkerns(head)
+ nodes.injections.handler(head)
nodes.handlers.protectglyphs(head)
head = node.ligaturing(head)
head = node.kerning(head)
diff --git a/tex/context/base/node-ini.lua b/tex/context/base/node-ini.lua
index 5af780264..2fb3fe9c6 100644
--- a/tex/context/base/node-ini.lua
+++ b/tex/context/base/node-ini.lua
@@ -58,6 +58,8 @@ local traverse, traverse_id = node.traverse, node.traverse_id
local free_node, remove_node = node.free, node.remove
local insert_node_before, insert_node_after = node.insert_before, node.insert_after
+local allocate = utilities.storage.allocate
+
nodes = nodes or { }
local nodes = nodes
@@ -65,7 +67,7 @@ nodes.handlers = nodes.handlers or { }
-- there will be more of this:
-local skipcodes = {
+local skipcodes = allocate {
[ 0] = "userskip",
[ 1] = "lineskip",
[ 2] = "baselineskip",
@@ -87,7 +89,7 @@ local skipcodes = {
[18] = "thickmuskip",
}
-local noadcodes = {
+local noadcodes = allocate {
[ 0] = "ord",
[ 1] = "opdisplaylimits",
[ 2] = "oplimits",
@@ -103,7 +105,7 @@ local noadcodes = {
[12] = "vcenter",
}
-local listcodes = {
+local listcodes = allocate {
[ 0] = "unknown",
[ 1] = "line",
[ 2] = "box",
@@ -112,7 +114,7 @@ local listcodes = {
[ 5] = "cell",
}
-local glyphcodes = {
+local glyphcodes = allocate {
[0] = "character",
[1] = "glyph",
[2] = "ligature",
@@ -121,18 +123,18 @@ local glyphcodes = {
[5] = "right",
}
-local kerncodes = {
+local kerncodes = allocate {
[0] = "fontkern",
[1] = "userkern",
[2] = "accentkern",
}
-local mathcodes = {
+local mathcodes = allocate {
[0] = "beginmath",
[1] = "endmath",
}
-local fillcodes = {
+local fillcodes = allocate {
[0] = "stretch",
[1] = "fi",
[2] = "fil",
@@ -151,15 +153,15 @@ end
local nodecodes = simplified(node.types())
local whatcodes = simplified(node.whatsits())
-skipcodes = swapped(skipcodes, skipcodes)
-noadcodes = swapped(noadcodes, noadcodes)
-nodecodes = swapped(nodecodes, nodecodes)
-whatcodes = swapped(whatcodes, whatcodes)
-listcodes = swapped(listcodes, listcodes)
-glyphcodes = swapped(glyphcodes,glyphcodes)
-kerncodes = swapped(kerncodes, kerncodes)
-mathcodes = swapped(mathcodes, mathcodes)
-fillcodes = swapped(fillcodes, fillcodes)
+skipcodes = allocate(swapped(skipcodes, skipcodes ))
+noadcodes = allocate(swapped(noadcodes, noadcodes ))
+nodecodes = allocate(swapped(nodecodes, nodecodes ))
+whatcodes = allocate(swapped(whatcodes, whatcodes ))
+listcodes = allocate(swapped(listcodes, listcodes ))
+glyphcodes = allocate(swapped(glyphcodes,glyphcodes))
+kerncodes = allocate(swapped(kerncodes, kerncodes ))
+mathcodes = allocate(swapped(mathcodes, mathcodes ))
+fillcodes = allocate(swapped(fillcodes, fillcodes ))
nodes.skipcodes = skipcodes nodes.gluecodes = skipcodes -- more official
nodes.noadcodes = noadcodes
@@ -177,7 +179,7 @@ listcodes.column = listcodes.alignment
kerncodes.italiccorrection = kerncodes.userkern
kerncodes.kerning = kerncodes.fontkern
-nodes.codes = {
+nodes.codes = allocate {
hlist = listcodes,
vlist = listcodes,
glyph = glyphcodes,
diff --git a/tex/context/base/node-inj.lua b/tex/context/base/node-inj.lua
index 527df7ca9..55fab6e75 100644
--- a/tex/context/base/node-inj.lua
+++ b/tex/context/base/node-inj.lua
@@ -25,6 +25,9 @@ fonts = fonts or { }
fonts.tfm = fonts.tfm or { }
fonts.ids = fonts.ids or { }
+nodes.injections = nodes.injections or { }
+local injections = nodes.injections
+
local fontdata = fonts.ids
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
@@ -57,7 +60,7 @@ local kerns = { }
-- for the moment we pass the r2l key ... volt/arabtype tests
-function nodes.set_cursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
local ws, wn = tfmstart.width, tfmnext.width
local bound = #cursives + 1
@@ -67,7 +70,7 @@ function nodes.set_cursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
return dx, dy, bound
end
-function nodes.set_pair(current,factor,rlmode,r2lflag,spec,tfmchr)
+function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
-- dy = y - h
if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
@@ -86,7 +89,7 @@ function nodes.set_pair(current,factor,rlmode,r2lflag,spec,tfmchr)
return x, y, w, h -- no bound
end
-function nodes.set_kern(current,factor,rlmode,x,tfmchr)
+function injections.setkern(current,factor,rlmode,x,tfmchr)
local dx = factor*x
if dx ~= 0 then
local bound = #kerns + 1
@@ -98,7 +101,7 @@ function nodes.set_kern(current,factor,rlmode,x,tfmchr)
end
end
-function nodes.set_mark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, ma=markanchor
+function injections.setmark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, ma=markanchor
local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
local bound = has_attribute(base,markbase)
if bound then
@@ -126,7 +129,7 @@ local function dir(n)
return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
end
-function nodes.trace_injection(head)
+local function trace(head)
report_injections("begin run")
for n in traverse_id(glyph_code,head) do
if n.subtype < 256 then
@@ -177,12 +180,12 @@ end
-- todo: reuse tables (i.e. no collection), but will be extra fields anyway
-- todo: check for attribute
-function nodes.handlers.injectkerns(head,where,keep)
+function injections.handler(head,where,keep)
local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
if has_marks or has_cursives then
--~ if has_marks or has_cursives or has_kerns then
if trace_injections then
- nodes.trace_injection(head)
+ trace(head)
end
-- in the future variant we will not copy items but refs to tables
local done, ky, rl, valid, cx, wx, mk = false, { }, { }, { }, { }, { }, { }
@@ -386,7 +389,7 @@ function nodes.handlers.injectkerns(head,where,keep)
end
elseif has_kerns then
if trace_injections then
- nodes.trace_injection(head)
+ trace(head)
end
for n in traverse_id(glyph_code,head) do
if n.subtype < 256 then
diff --git a/tex/context/base/node-ref.lua b/tex/context/base/node-ref.lua
index 40dd82c4d..0473e4523 100644
--- a/tex/context/base/node-ref.lua
+++ b/tex/context/base/node-ref.lua
@@ -14,17 +14,19 @@ if not modules then modules = { } end modules ['node-bck'] = {
-- helper, will end up in luatex
+-- is grouplevel still used?
+
local cleanupreferences, cleanupdestinations = false, true
local attributes, nodes, node = attributes, nodes, node
-local nodeinjections = backends.nodeinjections
-local codeinjections = backends.codeinjections
+local nodeinjections = backends.nodeinjections
+local codeinjections = backends.codeinjections
-local transparencies = attributes.transparencies
-local colors = attributes.colors
-local references = structures.references
-local tasks = nodes.tasks
+local transparencies = attributes.transparencies
+local colors = attributes.colors
+local references = structures.references
+local tasks = nodes.tasks
local hpack_list = node.hpack
local list_dimensions = node.dimensions
@@ -211,7 +213,8 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
elseif subtype == dir_code then
txtdir = current.dir
end
-elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left?
+ elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left?
+ --
elseif id == hlist_code or id == vlist_code then
if not reference and r and (not skip or r > skip) then
inject_list(id,current,r,make,stack,pardir,txtdir)
@@ -340,24 +343,34 @@ local texcount = tex.count
-- references:
+local stack = { }
+local done = { }
+local attribute = attributes.private('reference')
+local nofreferences = 0
+local topofstack = 0
+
nodes.references = {
- attribute = attributes.private('reference'),
- stack = { },
- done = { },
+ attribute = attribute,
+ stack = stack,
+ done = done,
}
-local stack, done, attribute = nodes.references.stack, nodes.references.done, nodes.references.attribute
+-- todo: get rid of n (n is just a number, can be used for tracing, obsolete)
-local nofreferences, topofstack = 0, 0
-
-local function setreference(n,h,d,r) -- n is just a number, can be used for tracing
+local function setreference(n,h,d,r)
topofstack = topofstack + 1
- stack[topofstack] = { n, h, d, codeinjections.prerollreference(r) } -- the preroll permits us to determine samepage (but delayed also has some advantages)
---~ texattribute[attribute] = topofstack -- todo -> at tex end
+ -- the preroll permits us to determine samepage (but delayed also has some advantages)
+ -- so some part of the backend work is already done here
+--~ stack[topofstack] = { n, h, d, codeinjections.prerollreference(r) }
+ stack[topofstack] = { r, h, d, codeinjections.prerollreference(r) }
+ -- texattribute[attribute] = topofstack -- todo -> at tex end
texcount.lastreferenceattribute = topofstack
end
-nodes.setreference = setreference
+function references.get(n) -- not public so functionality can change
+ local sn = stack[n]
+ return sn and sn[1]
+end
local function makereference(width,height,depth,reference)
local sr = stack[reference]
@@ -407,24 +420,24 @@ end
-- destinations (we can clean up once set!)
+local stack = { }
+local done = { }
+local attribute = attributes.private('destination')
+local nofdestinations = 0
+local topofstack = 0
+
nodes.destinations = {
- attribute = attributes.private('destination'),
- stack = { },
- done = { },
+ attribute = attribute,
+ stack = stack,
+ done = done,
}
-local stack, done, attribute = nodes.destinations.stack, nodes.destinations.done, nodes.destinations.attribute
-
-local nofdestinations, topofstack = 0, 0
-
local function setdestination(n,h,d,name,view) -- n = grouplevel, name == table
topofstack = topofstack + 1
stack[topofstack] = { n, h, d, name, view }
return topofstack
end
-nodes.setdestination = setdestination
-
local function makedestination(width,height,depth,reference)
local sr = stack[reference]
if sr then
diff --git a/tex/context/base/node-rul.lua b/tex/context/base/node-rul.lua
index a8f59c37e..a0a18590b 100644
--- a/tex/context/base/node-rul.lua
+++ b/tex/context/base/node-rul.lua
@@ -20,7 +20,7 @@ local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
local rule_code = nodecodes.rule
-function nodes.strip_range(first,last) -- todo: dir
+function nodes.striprange(first,last) -- todo: dir
if first and last then -- just to be sure
if first == last then
return first, last
@@ -60,14 +60,14 @@ local trace_ruled = false trackers.register("nodes.ruled", function(v) trace_ru
local report_ruled = logs.new("ruled")
local floor = math.floor
-local n_tostring, n_tosequence = nodes.ids_tostring, nodes.tosequence
+local n_tostring, n_tosequence = nodes.idstostring, nodes.tosequence
local a_ruled = attributes.private('ruled')
local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
local a_colorspace = attributes.private('colormodel')
-local insert_before, insert_after, strip_range = node.insert_before, node.insert_after, nodes.strip_range
+local insert_before, insert_after, striprange = node.insert_before, node.insert_after, nodes.striprange
local list_dimensions, has_attribute, set_attribute = node.dimensions, node.has_attribute, node.set_attribute
local hpack_nodes = node.hpack
local dimenfactor = fonts.dimenfactor
@@ -119,7 +119,7 @@ local checkdir = true
-- this one needs to take layers into account (i.e. we need a list of
-- critical attributes)
--- omkeren class en level -> scheelt functie call in analyse
+-- omkeren class en level -> scheelt functie call in analyze
local function processwords(attribute,data,flush,head,parent) -- we have hlistdir and local dir
local n = head
@@ -234,11 +234,11 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
if strip then
if trace_ruled then
local before = n_tosequence(f,l,true)
- f, l = strip_range(f,l)
+ f, l = striprange(f,l)
local after = n_tosequence(f,l,true)
report_ruled("range stripper: %s -> %s",before,after)
else
- f, l = strip_range(f,l)
+ f, l = striprange(f,l)
end
end
if not f then
@@ -330,7 +330,7 @@ end
local function flush_shifted(head,first,last,data,level,parent,strip) -- not that fast but acceptable for this purpose
if true then
- first, last = strip_range(first,last)
+ first, last = striprange(first,last)
end
local prev, next = first.prev, last.next
first.prev, last.next = nil, nil
diff --git a/tex/context/base/node-ser.lua b/tex/context/base/node-ser.lua
index 0ef074397..c7bc53a3a 100644
--- a/tex/context/base/node-ser.lua
+++ b/tex/context/base/node-ser.lua
@@ -11,6 +11,8 @@ if not modules then modules = { } end modules ['node-ser'] = {
local type, format, concat, rep = type, string.format, table.concat, string.rep
+local allocate = utilities.storage.allocate
+
local ctxcatcodes = tex.ctxcatcodes
local nodes, node = nodes, node
@@ -24,7 +26,7 @@ local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
-local expand = table.tohash {
+local expand = allocate ( table.tohash {
"list", -- list_ptr & ins_ptr & adjust_ptr
"pre", --
"post", --
@@ -39,25 +41,25 @@ local expand = table.tohash {
"leader", -- leader_ptr
"action", -- action_ptr
"value", -- user_defined nodes with subtype 'a' en 'n'
-}
+} )
-- page_insert: "height", "last_ins_ptr", "best_ins_ptr"
-- split_insert: "height", "last_ins_ptr", "best_ins_ptr", "broken_ptr", "broken_ins"
-local ignore = table.tohash {
+local ignore = allocate ( table.tohash {
"page_insert",
"split_insert",
"ref_count",
-}
+} )
-local dimension = table.tohash {
+local dimension = allocate ( table.tohash {
"width", "height", "depth", "shift",
"stretch", "shrink",
"xoffset", "yoffset",
"surround",
"kern",
"box_left_width", "box_right_width"
-}
+} )
-- flat: don't use next, but indexes
-- verbose: also add type
@@ -264,17 +266,3 @@ function nodes.print(head,n)
head = head.next
end
end
-
-function nodes.checkforleaks(sparse)
- local l = { }
- local q = node.usedlist()
- for p in traverse(q) do
- local s = table.serialize(nodes.astable(p,sparse),node_type(p.id))
- l[s] = (l[s] or 0) + 1
- end
- node.flush_list(q)
- for k, v in next, l do
- texio.write_nl(format("%s * %s", v, k))
- end
-end
-
diff --git a/tex/context/base/node-spl.lua b/tex/context/base/node-spl.lua
index bd641817c..8d8c297bd 100644
--- a/tex/context/base/node-spl.lua
+++ b/tex/context/base/node-spl.lua
@@ -24,6 +24,7 @@ local utfchar = utf.char
local random = math.random
local variables = interfaces.variables
local settings_to_array, settings_to_hash = utilities.parsers.settings_to_array, utilities.parsers.settings_to_hash
+local fcs = fonts.colors.set
local trace_split = false trackers.register("builders.paragraphs.solutions.splitters.splitter", function(v) trace_split = v end)
local trace_optimize = false trackers.register("builders.paragraphs.solutions.splitters.optimizer", function(v) trace_optimize = v end)
@@ -73,7 +74,7 @@ local new_usernumber = nodepool.usernumber
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
local process_characters = nodes.handlers.characters
-local inject_kerns = nodes.handlers.injectkerns
+local inject_kerns = nodes.injections.handler
local fontdata = fonts.ids
local parbuilders = builders.paragraphs
@@ -110,17 +111,18 @@ function splitters.setup(setups)
criterium = tonumber(setups.criterium) or criterium
end
+local contextsetups = fonts.definers.specifiers.contextsetups
+
local function convert(featuresets,name,set,what)
local list, numbers = set[what], { }
if list then
- local setups = fonts.define.specify.context_setups
for i=1,#list do
local feature = list[i]
local fs = featuresets[feature]
local fn = fs and fs.number
if not fn then
-- fall back on global features
- fs = setups[feature]
+ fs = contextsetups[feature]
fn = fs and fs.number
end
if fn then
@@ -154,7 +156,6 @@ end
fonts.goodies.register("solutions",initialize)
function splitters.define(name,parameters)
- local setups = fonts.define.specify.context_setups
local settings = settings_to_hash(parameters) -- todo: interfacing
local goodies, solution, less, more = settings.goodies, settings.solution, settings.less, settings.more
local less_set, more_set
@@ -179,7 +180,7 @@ function splitters.define(name,parameters)
else
if l then
for i=1,#l do
- local ss = setups[l[i]]
+ local ss = contextsetups[l[i]]
if ss then
less_set[#less_set+1] = ss.number
end
@@ -187,7 +188,7 @@ function splitters.define(name,parameters)
end
if m then
for i=1,#m do
- local ss = setups[m[i]]
+ local ss = contextsetups[m[i]]
if ss then
more_set[#more_set+1] = ss.number
end
@@ -207,8 +208,6 @@ function splitters.define(name,parameters)
tex.write(#solutions)
end
-local fcs = (fonts.color and fonts.color.set) or function() end
-
local nofwords, noftries, nofadapted, nofkept, nofparagraphs = 0, 0, 0, 0, 0
function splitters.split(head)
diff --git a/tex/context/base/node-tra.lua b/tex/context/base/node-tra.lua
index dd6a49d4b..c5cf04126 100644
--- a/tex/context/base/node-tra.lua
+++ b/tex/context/base/node-tra.lua
@@ -15,6 +15,7 @@ local utfchar = utf.char
local concat = table.concat
local format, match, gmatch, concat, rep = string.format, string.match, string.gmatch, table.concat, string.rep
local lpegmatch = lpeg.match
+local write_nl = texio.write_nl
local ctxcatcodes = tex.ctxcatcodes
@@ -38,6 +39,9 @@ local tasks = nodes.tasks
nodes.handlers = nodes.handlers or { }
local handlers = nodes.handlers
+nodes.injections = nodes.injections or { }
+local injections = nodes.injections
+
tracers.characters = tracers.characters or { }
tracers.steppers = tracers.steppers or { }
@@ -312,7 +316,7 @@ function step_tracers.check(head)
if collecting then
step_tracers.reset()
local n = copy_node_list(head)
- handlers.injectkerns(n,nil,"trace",true)
+ injections.handler(n,nil,"trace",true)
handlers.protectglyphs(n) -- can be option
collection[1] = n
end
@@ -323,7 +327,7 @@ function step_tracers.register(head)
local nc = #collection+1
if messages[nc] then
local n = copy_node_list(head)
- handlers.injectkerns(n,nil,"trace",true)
+ injections.handler(n,nil,"trace",true)
handlers.protectglyphs(n) -- can be option
collection[nc] = n
end
@@ -343,16 +347,16 @@ end
-- this will be reorganized:
-function nodes.show_list(head, message)
+function nodes.showlist(head, message)
if message then
- texio.write_nl(message)
+ write_nl(message)
end
for n in traverse_nodes(head) do
- texio.write_nl(tostring(n))
+ write_nl(tostring(n))
end
end
-function nodes.checkglyphs(head,message)
+function nodes.handlers.checkglyphs(head,message)
local t = { }
for g in traverse_id(glyph_code,head) do
t[#t+1] = format("U+%04X:%s",g.char,g.subtype)
@@ -363,6 +367,19 @@ function nodes.checkglyphs(head,message)
return false
end
+function nodes.handlers.checkforleaks(sparse)
+ local l = { }
+ local q = node.usedlist()
+ for p in traverse(q) do
+ local s = table.serialize(nodes.astable(p,sparse),node_type(p.id))
+ l[s] = (l[s] or 0) + 1
+ end
+ node.flush_list(q)
+ for k, v in next, l do
+ write_nl(format("%s * %s", v, k))
+ end
+end
+
local function tosequence(start,stop,compact)
if start then
local t = { }
@@ -417,13 +434,13 @@ function nodes.report(t,done)
if status.output_active then
report_nodes("output, changed, %s nodes",nodes.count(t))
else
- texio.write("nodes","normal, changed, %s nodes",nodes.count(t))
+ write_nl("nodes","normal, changed, %s nodes",nodes.count(t))
end
else
if status.output_active then
report_nodes("output, unchanged, %s nodes",nodes.count(t))
else
- texio.write("nodes","normal, unchanged, %s nodes",nodes.count(t))
+ write_nl("nodes","normal, unchanged, %s nodes",nodes.count(t))
end
end
end
@@ -436,7 +453,7 @@ function nodes.packlist(head)
return t
end
-function nodes.ids_to_string(head,tail)
+function nodes.idstostring(head,tail)
local t, last_id, last_n = { }, nil, 0
for n in traverse_nodes(head,tail) do -- hm, does not stop at tail
local id = n.id
@@ -466,11 +483,9 @@ function nodes.ids_to_string(head,tail)
return concat(t," ")
end
-nodes.ids_tostring = nodes.ids_to_string
-
-local function show_simple_list(h,depth,n)
+local function showsimplelist(h,depth,n)
while h do
- texio.write_nl(rep(" ",n) .. tostring(h))
+ write_nl(rep(" ",n) .. tostring(h))
if not depth or n < depth then
local id = h.id
if id == hlist_code or id == vlist_code then
@@ -510,39 +525,44 @@ end
local what = { [0] = "unknown", "line", "box", "indent", "row", "cell" }
-local function show_boxes(n,symbol,depth)
+local function showboxes(n,symbol,depth)
depth, symbol = depth or 0, symbol or "."
for n in traverse_nodes(n) do
local id = n.id
if id == hlist_code or id == vlist_code then
local s = n.subtype
logs.simple(rep(symbol,depth) .. what[s] or s)
- show_boxes(n.list,symbol,depth+1)
+ showboxes(n.list,symbol,depth+1)
end
end
end
-nodes.show_boxes = show_boxes
+nodes.showboxes = showboxes
local threshold = 65536
-local function toutf(list,result)
+local function toutf(list,result,stopcriterium)
for n in traverse_nodes(list) do
local id = n.id
if id == glyph_code then
- local c = n.char
- local fc = fontchar[n.font]
- if fc then
- local u = fc[c].tounicode
- if u then
- for s in gmatch(u,"..") do
- result[#result+1] = utfchar(tonumber(s,16))
+ local components = n.components
+ if components then
+ toutf(components,result)
+ else
+ local c = n.char
+ local fc = fontchar[n.font]
+ if fc then
+ local u = fc[c].tounicode
+ if u then
+ for s in gmatch(u,"....") do
+ result[#result+1] = utfchar(tonumber(s,16))
+ end
+ else
+ result[#result+1] = utfchar(c)
end
else
result[#result+1] = utfchar(c)
end
- else
- result[#result+1] = utfchar(c)
end
elseif id == disc_code then
toutf(n.replace,result)
@@ -560,12 +580,15 @@ local function toutf(list,result)
result[#result+1] = " "
end
end
+ if n == stopcriterium then
+ break
+ end
end
return result
end
-function nodes.toutf(list)
- return concat(toutf(list,{}))
+function nodes.toutf(list,stopcriterium)
+ return concat(toutf(list,{},stopcriterium))
end
-- might move elsewhere
diff --git a/tex/context/base/node-tsk.lua b/tex/context/base/node-tsk.lua
index 0b2e16b92..a6890c2d5 100644
--- a/tex/context/base/node-tsk.lua
+++ b/tex/context/base/node-tsk.lua
@@ -12,11 +12,13 @@ local trace_tasks = false trackers.register("tasks.creation", function(v) trace
local report_tasks = logs.new("tasks")
+local allocate = utilities.storage.allocate
+
local nodes = nodes
nodes.tasks = nodes.tasks or { }
local tasks = nodes.tasks
-tasks.data = tasks.data or { }
+tasks.data = allocate()
local tasksdata = tasks.data
local sequencers = utilities.sequencers
diff --git a/tex/context/base/node-tst.lua b/tex/context/base/node-tst.lua
index c3b555b20..bfe0051bd 100644
--- a/tex/context/base/node-tst.lua
+++ b/tex/context/base/node-tst.lua
@@ -26,7 +26,7 @@ local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
local find_node_tail = node.tail or node.slide
-function nodes.the_left_margin(n) -- todo: three values
+function nodes.leftmarginwidth(n) -- todo: three values
while n do
local id = n.id
if id == glue_code then
@@ -42,7 +42,7 @@ function nodes.the_left_margin(n) -- todo: three values
return 0
end
-function nodes.the_right_margin(n)
+function nodes.rightmarginwidth(n)
if n then
n = find_node_tail(n)
while n do
diff --git a/tex/context/base/pack-obj.lua b/tex/context/base/pack-obj.lua
index 07ba6da31..088ad6add 100644
--- a/tex/context/base/pack-obj.lua
+++ b/tex/context/base/pack-obj.lua
@@ -12,21 +12,23 @@ reusable components.</p>
--ldx]]--
local texsprint, texcount = tex.sprint, tex.count
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+
+local collected, tobesaved = allocate(), allocate()
local jobobjects = {
- collected = { },
- tobesaved = { },
+ collected = collected,
+ tobesaved = tobesaved,
}
job.objects = jobobjects
-local collected, tobesaved = jobobjects.collected, jobobjects.tobesaved
-
local function initializer()
- collected, tobesaved = jobobjects.collected, jobobjects.tobesaved
+ collected = mark(jobobjects.collected)
+ tobesaved = mark(jobobjects.tobesaved)
end
-job.register('job.objects.collected', jobobjects.tobesaved, initializer, nil)
+job.register('job.objects.collected', tobesaved, initializer, nil)
function jobobjects.save(tag,number,page)
local t = { number, page }
diff --git a/tex/context/base/page-imp.mkiv b/tex/context/base/page-imp.mkiv
index a0f32ef86..e66924e61 100644
--- a/tex/context/base/page-imp.mkiv
+++ b/tex/context/base/page-imp.mkiv
@@ -152,6 +152,8 @@
\let\pagestoshipout\empty % {1,3,6}
\chardef\whichpagetoshipout=0 % 0=all 1=odd 2=even
+\newbox\shipoutscratchbox
+
\def\actualshipout#1%
{\global\advance\shippedoutpages\plusone
% this is not resource safe!
@@ -170,6 +172,23 @@
\donetrue\donefalse
\fi
\ifdone
+ \begingroup
+ \setbox\shipoutscratchbox\hbox{#1}% just in case there are objects there, hook for testing
+ \finalizeshipoutbox\shipoutscratchbox
+ \setbox\scratchbox\hbox
+ {% before the main one !
+ \ifcase\realfolio \or
+ \the\everyfirstshipout
+ \global\everyfirstshipout\emptytoks
+ \fi
+ % the main one
+ \the\everyshipout\relax
+ % always last (and after the main one)
+ \ifnum\realpageno=\lastpage\relax
+ \the\everylastshipout
+ \global\everylastshipout\emptytoks
+ \fi}%
+ \smashbox\scratchbox
\shipout\vbox
{%\forgetall
\offinterlineskip
@@ -177,26 +196,11 @@
\scratchdimen-1in
\vskip\scratchdimen
\hskip\scratchdimen
- \setbox0\hbox{#1}% just in case there are objects there, hook for testing
- \finalizeshipoutbox0%
- \setbox\scratchbox\hbox
- {% before the main one !
- \ifcase\realfolio \or
- \the\everyfirstshipout
- \global\everyfirstshipout\emptytoks
- \fi
- % the main one
- \the\everyshipout\relax
- % always last (and after the main one)
- \ifnum\realpageno=\lastpage\relax
- \the\everylastshipout
- \global\everylastshipout\emptytoks
- \fi}%
- \smashbox\scratchbox
\hbox % \setbox0=\box.. is nicer
{\box\scratchbox
\ifvoid\postponedcontent\else\box\postponedcontent\fi % evt ver naar links !
- \box0}}%
+ \box\shipoutscratchbox}}%
+ \endgroup
\else
\message
{[\ifarrangingpages arranged \fi page
diff --git a/tex/context/base/page-lin.lua b/tex/context/base/page-lin.lua
index 08a10c2dd..7a525172c 100644
--- a/tex/context/base/page-lin.lua
+++ b/tex/context/base/page-lin.lua
@@ -27,6 +27,8 @@ local last = #data
lines.scratchbox = lines.scratchbox or 0
+local leftmarginwidth = nodes.leftmarginwidth
+
storage.register("lines/data", lines.data, "nodes.lines.data")
-- if there is demand for it, we can support multiple numbering streams
@@ -151,8 +153,6 @@ function boxed.setup(n,configuration)
return n
end
-local the_left_margin = nodes.the_left_margin
-
local function check_number(n,a,skip,sameline)
local d = data[a]
if d then
@@ -174,7 +174,7 @@ local function check_number(n,a,skip,sameline)
report_lines("skipping line number %s for setup %s: %s (%s)",#current_list,a,s,d.continue or "no")
end
end
- context.makelinenumber(tag,skipflag,s,n.shift,n.width,the_left_margin(n.list),n.dir)
+ context.makelinenumber(tag,skipflag,s,n.shift,n.width,leftmarginwidth(n.list),n.dir)
end
end
diff --git a/tex/context/base/page-mak.mkii b/tex/context/base/page-mak.mkii
index 040feb1d2..2087a6840 100644
--- a/tex/context/base/page-mak.mkii
+++ b/tex/context/base/page-mak.mkii
@@ -42,8 +42,6 @@
[\??do#1]%
[\c!width=\innermakeupwidth, % example in manual / was \makeupwidth
\c!height=\textheight, % example in manual
- \c!voffset=\!!zeropoint, % example in manual
- \c!hoffset=\!!zeropoint, % example in manual
\c!commands=,
\c!setups=,
\c!page=\v!right,
@@ -214,8 +212,6 @@
[\v!standard]
[\c!width=\innermakeupwidth,
\c!height=\textheight,
- \c!voffset=\!!zeropoint,
- \c!hoffset=\!!zeropoint,
\c!page=\v!right,
\c!doublesided=\v!empty]
diff --git a/tex/context/base/page-mak.mkiv b/tex/context/base/page-mak.mkiv
index fd89a45ff..f43734230 100644
--- a/tex/context/base/page-mak.mkiv
+++ b/tex/context/base/page-mak.mkiv
@@ -42,8 +42,6 @@
[\??do#1]%
[\c!width=\innermakeupwidth, % example in manual / was \makeupwidth
\c!height=\textheight, % example in manual
- \c!voffset=\!!zeropoint, % example in manual
- \c!hoffset=\!!zeropoint, % example in manual
\c!commands=,
\c!setups=,
\c!page=\v!right,
@@ -213,8 +211,6 @@
[\v!standard]
[\c!width=\innermakeupwidth,
\c!height=\textheight,
- \c!voffset=\!!zeropoint,
- \c!hoffset=\!!zeropoint,
\c!page=\v!right,
\c!doublesided=\v!empty]
diff --git a/tex/context/base/page-mar.mkiv b/tex/context/base/page-mar.mkiv
index 464b4f9bb..3eac9b1d4 100644
--- a/tex/context/base/page-mar.mkiv
+++ b/tex/context/base/page-mar.mkiv
@@ -60,12 +60,16 @@
#2]%
\setuvalue{#1}{\dohandlemarginline{#1}}}
-\def\marginlineparameter #1{\csname\??im\??im\currentmarginline#1\endcsname}
-\def\marginlineexecuter #1#2{\executeifdefined{\??im\??im\currentmarginline#1}{#2}}
+\def\marginlineparameter#1{\csname\??im\??im\currentmarginline#1\endcsname}
-\def\dohandlemarginline#1% #2
+\def\marginlineexecuter#1#2%
+ {\dostarttagged\t!margintext\currentmarginline
+ \executeifdefined{\??im\??im\currentmarginline#1}{#2}%
+ \dostoptagged}
+
+\def\dohandlemarginline#1%#2%
{\def\currentmarginline{#1}%
- \csname\s!do\??im\??im\executeifdefined{\??im\??im#1\c!location}\v!left\endcsname{#1}} % {#2}
+ \csname\s!do\??im\??im\executeifdefined{\??im\??im#1\c!location}\v!left\endcsname{#1}}% {#2}
\def\doleftmarginline#1#2% #1 is redundant (we can remove it when we group dohandlemarginline; maybe ...
{\pushindentation
@@ -284,7 +288,9 @@
\@@imbefore
\dostartattributes{\??im\margincontenttag}\c!style\c!color\empty
\dosetupstrut[\margincontentstrut]% was: \setstrut % yes or no
+ \dostarttagged\t!margintextblock\currentmargincontent % margincontenttag
\begstrut#3\endstrut\endgraf
+ \dostoptagged
\xdef\margincontentstrutheight{\the\strutht}% so that it's known outside the framed
\dostopattributes
\@@imafter}%
@@ -676,7 +682,7 @@
\noexpand \doinmargin[\executeifdefined{\??im\margincontenttag\c!location}\@@imlocation][\v!normal][\margincontenttag][\margincontenttag][#2]%
\to \collectedmargintexts
\doglobal \appendtoks
- {#3}%
+ {#3}% argument to previously appended \doinmargin
\to \collectedmargintexts}
\def\doflushmargincontents % plural
diff --git a/tex/context/base/regi-ini.lua b/tex/context/base/regi-ini.lua
index 6c4d787c1..f69f8866f 100644
--- a/tex/context/base/regi-ini.lua
+++ b/tex/context/base/regi-ini.lua
@@ -80,7 +80,7 @@ function regimes.enable(regime)
if data[regime] then
regimes.currentregime = regime
local translate = regimes.translate
- resolvers.install_text_filter('input',function(s)
+ resolvers.filters.install('input',function(s)
return translate(s,regime)
end)
else
@@ -90,5 +90,5 @@ end
function regimes.disable()
regimes.currentregime = "utf"
- resolvers.install_text_filter('input',nil)
+ resolvers.filters.install('input',nil)
end
diff --git a/tex/context/base/scrn-men.mkiv b/tex/context/base/scrn-men.mkiv
index 29085d87a..63f72c463 100644
--- a/tex/context/base/scrn-men.mkiv
+++ b/tex/context/base/scrn-men.mkiv
@@ -74,12 +74,12 @@
%D Define menus:
-\def\setmenuparameter#1#2#3{\@EA\def\csname\??am#1#2\endcsname{#3}}
-\def\letmenuparameter #1#2{\@EA\let\csname\??am#1#2\endcsname}
+\def\setmenuparameter#1#2#3{\@EA\def\csname\??am#1:#2\endcsname{#3}}
+\def\letmenuparameter #1#2{\@EA\let\csname\??am#1:#2\endcsname}
-\def\menuparameter #1{\csname\domenuparameter{\??am\currentmenu}#1\endcsname}
-\def\namedmenuparameter#1#2{\csname\domenuparameter{\??am #1}#2\endcsname}
-\def\menuparameterhash #1{\domenuparameterhash {\??am\currentmenu}#1}
+\def\menuparameter #1{\csname\domenuparameter{\??am\currentmenu:}#1\endcsname}
+\def\namedmenuparameter#1#2{\csname\domenuparameter{\??am #1:}#2\endcsname}
+\def\menuparameterhash #1{\domenuparameterhash {\??am\currentmenu:}#1}
\def\domenuparameter #1#2{\ifcsname#1#2\endcsname#1#2\else\expandafter\domenuparentparameter \csname#1\s!parent\endcsname#2\fi}
\def\domenuparameterhash#1#2{\ifcsname#1#2\endcsname #1\else\expandafter\domenuparentparameterhash\csname#1\s!parent\endcsname#2\fi}
@@ -90,7 +90,7 @@
\unexpanded\def\defineinteractionmenu
{\dotripleempty\dodefineinteractionmenu}
-\def\dodefineinteractionmenu[#1][#2][#3]% [name] [location] [settings]
+\def\dodefineinteractionmenu[#1][#2][#3]% [name] [location] [settings|parent] % right right vertical
{\ifsecondargument
\ifcsname\??am:\c!list:#2\endcsname \else
\letvalue{\??am:\c!list:#2}\empty
@@ -98,19 +98,19 @@
\normalexpanded{\noexpand\addtocommalist{#1}\@EA\noexpand\csname\??am:\c!list:#2\endcsname}%
\setvalue{\@@dodolistelement#1}{\def\dosomelistelement{\dodomenulistelement{#1}}}%
\ifthirdargument
- \presetlocalframed[\??am#1]%
+ \presetlocalframed[\??am#1:]%
\doifassignmentelse{#3}
{\doifelse{#1}{#2}
- {\getparameters[\??am#1][\c!location=#2,\c!menu=,\s!parent=\??am,#3]}
- {\getparameters[\??am#1][\c!location=#2,\c!menu=,\s!parent=\??am#2,#3]}}%
+ {\getparameters[\??am#1:][\c!location=#2,\c!menu=,\s!parent=\??am,#3]}
+ {\getparameters[\??am#1:][\c!location=#2,\c!menu=,\s!parent=\??am#2:,#3]}}%
{\doifelsenothing{#3}
- {\getparameters[\??am#1][\c!location=#2,\c!menu=,\s!parent=\??am]}
- {\getparameters[\??am#1][\c!location=#2,\c!menu=,\s!parent=\??am#3]}}%
+ {\getparameters[\??am#1:][\c!location=#2,\c!menu=,\s!parent=\??am]}
+ {\getparameters[\??am#1:][\c!location=#2,\c!menu=,\s!parent=\??am#3:]}}%
\else
- \getparameters[\??am#1][\c!location=#2,\c!menu=,\s!parent=\??am#2]%
+ \getparameters[\??am#1:][\c!location=#2,\c!menu=,\s!parent=\??am#2:]%
\fi
\else
- \getparameters[\??am#1][\s!parent=\??am]% simple cloning
+ \getparameters[\??am#1:][\s!parent=\??am]% simple cloning, like vertical
\fi}
\def\currentmenulist{\ifcsname\??am:\c!list:\currentmenu\endcsname\csname\??am:\c!list:\currentmenu\endcsname\fi}
@@ -121,7 +121,7 @@
{\dodoubleargument\dosetupinteractionmenu}
\def\dosetupinteractionmenu[#1][#2]%
- {\def\docommand##1{\getparameters[\??am##1][#2]}%
+ {\def\docommand##1{\getparameters[\??am##1:][#2]}%
\processcommalist[#1]\docommand}
\unexpanded\def\setupinteractionmenus[#1]%
@@ -157,19 +157,6 @@
\csname\??am:\c!menu:#1\endcsname
\fi}
-% \unexpanded\def\defineinteractionmenuclass
-% {\dodoubleargument\dodefineinteractionmenuclass}
-%
-% \def\dodefineinteractionmenuclass[#1][#2]% tag hori|veri
-% {\doifelse{#2}\v!vertical
-% {\setvalue{\??am:\c!menu:#1}{\verticalinteractionmenu {#1}{\getvalue{\??am#1\c!width }}}}
-% {\setvalue{\??am:\c!menu:#1}{\horizontalinteractionmenu{#1}{\getvalue{\??am#1\c!height}}}}}
-%
-% \defineinteractionmenuclass[\v!left ][\v!horizontal]
-% \defineinteractionmenuclass[\v!right ][\v!horizontal]
-% \defineinteractionmenuclass[\v!top ][\v!vertical]
-% \defineinteractionmenuclass[\v!bottom][\v!vertical]
-
\setvalue{\??am:\c!menu :\v!left }{\horizontalinteractionmenu\v!left \leftedgewidth }
\setvalue{\??am:\c!menu :\v!right }{\horizontalinteractionmenu\v!right \rightedgewidth}
\setvalue{\??am:\c!menu :\v!top }{\verticalinteractionmenu \v!top \topheight }
@@ -349,6 +336,8 @@
%D The menu commands:
+% to be redone, using parent inheritance instead
+
% ja : kader/achtergrond met tekst
% leeg : kader/achtergrond maar geen tekst
% nee : alleen ruimte reserveren
@@ -380,6 +369,21 @@
% \dosetfontattribute {#1}{#2}%
% \dosetcolorattribute{#1}{#3}%
+\def\locboxyesinject
+ {\ctxlua{structures.references.injectcurrentset(nil,nil)}}
+
+\def\locboxyesnormal#1#2#3%
+ {\hbox attr \referenceattribute \lastreferenceattribute {\localframed[#1][#2]{#3}}}
+
+\def\locboxyescontrast#1#2#3%
+ {\hbox attr \referenceattribute \lastreferenceattribute {\localframed[#1][#2,\c!color=\menuparameter\c!contrastcolor]{#3}}}
+
+\def\locboxyesempty#1#2#3%
+ {\localframed[#1][\c!empty=\v!yes,#2]{#3}}
+
+\def\locboxyesnothing#1#2#3%
+ {\localframed[#1][\c!empty=\v!yes,\c!frame=,\c!background=,#2]{#1}}
+
\def\setlocationboxyes#1[#2]#3[#4]% needs to be split as the attr is not applicable to the box
{\begingroup
\settrue\usemenuclick
@@ -389,33 +393,45 @@
{\analyzecurrentreference % we need to act on the state
\ifcase\referencepagestate
% something else than a page reference
- \ctxlua{structures.references.injectcurrentset(nil,nil)}%
- \hbox attr \referenceattribute \lastreferenceattribute {\localframed[#1][#2]{#3}}%
- \else\ifcase\csname\??am:\c!location:\menuparameter\c!samepage\endcsname\relax
- % yes: same page or not ... todo
- \ctxlua{structures.references.injectcurrentset(nil,nil)}%
- \ifnum\referencepagestate=\plusone % same page
- \hbox attr \referenceattribute \lastreferenceattribute {\localframed[#1][#2,\c!color=\menuparameter\c!contrastcolor]{#3}}%
- \else % elsewhere
- \hbox attr \referenceattribute \lastreferenceattribute {\localframed[#1][#2]{#3}}%
+ \locboxyesinject
+ \locboxyesnormal{#1}{#2}{#3}%
+ \else
+ \ifcase\csname\??am:\c!location:\menuparameter\c!samepage\endcsname\relax
+ % yes: same page or not ... todo
+ \locboxyesinject
+ \ifnum\referencepagestate=\plusone % same page
+ \locboxyescontrast{#1}{#2}{#3}%
+ \else % elsewhere
+ \locboxyesnormal{#1}{#2}{#3}%
+ \fi
+ \or
+ % empty but frame: no click
+ \ifnum\referencepagestate=\plusone % same page
+ \locboxyesempty{#1}{#2}{#3}
+ \else % elsewhere
+ \locboxyesinject
+ \locboxyesnormal{#1}{#2}{#3}%
+ \fi
+ \or
+ % empty no frame: no
+ \ifnum\referencepagestate=\plusone % same page
+ \locboxyesnothing{#1}{#2}{#3}%
+ \else % elsewhere
+ \locboxyesinject
+ \locboxyesnormal{#1}{#2}{#3}%
+ \fi
+ \or
+ % nothing at all
+ \global\settrue\skippedmenuitem
\fi
- \or
- % empty but frame: no click
- \localframed[#1][\c!empty=\v!yes,#2]{#3}%
- \or
- % empty no frame: no
- \localframed[#1][\c!empty=\v!yes,\c!frame=,\c!background=,#2]{#3}%
- \or
- % nothing at all
- \global\settrue\skippedmenuitem
- \fi\fi}%
+ \fi}%
{\unknownreference{#4}%
\ifcase\csname\??am:\c!location:\menuparameter\c!unknownreference\endcsname\relax
\localframed[#1][#2]{#3}%
\or
- \localframed[#1][\c!empty=\v!yes,#2]{#3}%
+ \locboxyesempty{#1}{#2}{#3}
\or
- \localframed[#1][\c!empty=\v!yes,\c!frame=,\c!background=,#2]{#1}%
+ \locboxyesnothing{#1}{#2}{#3}%
\or
\global\skippedmenuitemtrue
\fi}%
@@ -431,19 +447,19 @@
{\addsomemenuitem{\gotobox{\ignorespaces#2\unskip}[#1]}}
\def\menu@but[#1]#2\\%
- {\addsomemenuitem{\domenuitemposition\currentmenu{#1}{\setlocationboxyes{\??am\currentmenu}[]{\ignorespaces#2\unskip}[#1]}}}
+ {\addsomemenuitem{\domenuitemposition\currentmenu{#1}{\setlocationboxyes{\??am\currentmenu:}[]{\ignorespaces#2\unskip}[#1]}}}
\def\menu@got[#1]#2\\%
- {\addsomemenuitem{\setlocationboxyes{\??am\currentmenu}[\c!frame=\v!off,\c!background=]{\ignorespaces#2\unskip}[#1]}}
+ {\addsomemenuitem{\setlocationboxyes{\??am\currentmenu:}[\c!frame=\v!off,\c!background=]{\ignorespaces#2\unskip}[#1]}}
\def\menu@nop#1\\%
- {\addsomemenuitem{\setlocationboxraw{\??am\currentmenu}[\c!frame=\v!off,\c!background=,\c!empty=\v!yes]{\ignorespaces#1\unskip}[]}}
+ {\addsomemenuitem{\setlocationboxraw{\??am\currentmenu:}[\c!frame=\v!off,\c!background=,\c!empty=\v!yes]{\ignorespaces#1\unskip}[]}}
\def\menu@txt#1\\%
- {\addsomemenuitem{\localframed[\??am\currentmenu][\c!frame=\v!off,\c!background=]{\ignorespaces#1\unskip}}}
+ {\addsomemenuitem{\localframed[\??am\currentmenu:][\c!frame=\v!off,\c!background=]{\ignorespaces#1\unskip}}}
\def\menu@rul#1\\%
- {\addsomemenuitem{\localframed[\??am\currentmenu][]{\ignorespaces#1\unskip}}}
+ {\addsomemenuitem{\localframed[\??am\currentmenu:][]{\ignorespaces#1\unskip}}}
\def\menu@com#1\\%
{\ignorespaces#1\unskip\ignorespaces}
@@ -473,7 +489,7 @@
\def\dodomenulistelement#1#2#3#4#5#6#7%
{\addsomemenuitem{\domenuitemposition\currentmenu{internal(#3)}%
- {\setlocationboxyes{\??am\currentmenu}[]{\limitatetext{#5}{\namedlistparameter{#2}\c!maxwidth}{\unknown}}[internal(#3)]}}}
+ {\setlocationboxyes{\??am\currentmenu:}[]{\limitatetext{#5}{\namedlistparameter{#2}\c!maxwidth}{\unknown}}[internal(#3)]}}}
\unexpanded\def\menubutton
{\dodoubleempty\domenubutton}
@@ -491,10 +507,10 @@
\@EA\domenubuttonA
\fi[#1]}
-\def\domenubuttonA[#1][#2]#3[#4]{\setlocationboxyes\??bt[]{#3}[#4]} % normal button, no parameters
-\def\domenubuttonB[#1][#2]#3[#4]{\setlocationboxyes{\??am#1}[#2]{#3}[#4]} % menu button, with parameters
-\def\domenubuttonC[#1][#2]#3[#4]{\setlocationboxyes\??bt[#1]{#3}[#4]} % normal button, with parameters
-\def\domenubuttonD[#1][#2]#3[#4]{\setlocationboxyes{\??am#1}[]{#3}[#4]} % menu button, no parameters
+\def\domenubuttonA[#1][#2]#3[#4]{\setlocationboxyes \??bt[]{#3}[#4]} % normal button, no parameters
+\def\domenubuttonB[#1][#2]#3[#4]{\setlocationboxyes{\??am#1:}[#2]{#3}[#4]} % menu button, with parameters
+\def\domenubuttonC[#1][#2]#3[#4]{\setlocationboxyes \??bt[#1]{#3}[#4]} % normal button, with parameters
+\def\domenubuttonD[#1][#2]#3[#4]{\setlocationboxyes {\??am#1:}[]{#3}[#4]} % menu button, no parameters
\def\menubox
{\dodoubleempty\domenubox}
diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua
index 949bbe2a7..b543053af 100644
--- a/tex/context/base/scrp-ini.lua
+++ b/tex/context/base/scrp-ini.lua
@@ -15,6 +15,8 @@ local trace_injections = false trackers.register("scripts.injections", function
local report_preprocessing = logs.new("preprocessing")
+local allocate = utilities.storage.allocate
+
local set_attribute = node.set_attribute
local has_attribute = node.has_attribute
local first_character = node.first_character
@@ -32,23 +34,23 @@ local prestat = attributes.private('prestat')
local fontdata = fonts.ids
-local fcs = (fonts.color and fonts.color.set) or function() end
-local fcr = (fonts.color and fonts.color.reset) or function() end
+local fcs = fonts.colors.set
+local fcr = fonts.colors.reset
-scripts = scripts or { }
-local scripts = scripts
+scripts = scripts or { }
+local scripts = scripts
-scripts.handlers = scripts.handlers or { }
-local handlers = scripts.handlers
+scripts.handlers = scripts.handlers or { }
+local handlers = scripts.handlers
-scripts.names = scripts.names or { }
-local names = scripts.names
+scripts.names = allocate()
+local names = scripts.names
-scripts.numbers = scripts.numbers or { }
-local numbers = scripts.numbers
+scripts.numbers = allocate()
+local numbers = scripts.numbers
-scripts.hash = scripts.hash or { }
-local hash = scripts.hash
+scripts.hash = scripts.hash or { }
+local hash = scripts.hash
storage.register("scripts/hash", hash, "scripts.hash")
@@ -181,7 +183,7 @@ end
-- the following tables will become a proper installer
-scripts.colors = { -- todo: just named colors
+scripts.colors = allocate { -- todo: just named colors
korean = "font:isol",
chinese = "font:rest",
full_width_open = "font:init",
@@ -197,7 +199,7 @@ scripts.colors = { -- todo: just named colors
local colors = scripts.colors
-local numbertokind = {
+local numbertokind = allocate {
"korean",
"chinese",
"full_width_open",
@@ -211,7 +213,7 @@ local numbertokind = {
"jamo_final",
}
-local kindtonumber = {
+local kindtonumber = allocate {
korean = 1,
chinese = 2,
full_width_open = 3,
diff --git a/tex/context/base/sort-ini.lua b/tex/context/base/sort-ini.lua
index 2bafe8e5e..17cef9890 100644
--- a/tex/context/base/sort-ini.lua
+++ b/tex/context/base/sort-ini.lua
@@ -18,15 +18,17 @@ local utfcharacters, utfvalues, strcharacters = string.utfcharacters, string.utf
local chardata = characters.data
local next, type, tonumber = next, type, tonumber
+local allocate = utilities.storage.allocate
+
local trace_tests = false trackers.register("sorters.tests", function(v) trace_tests = v end)
local report_sorters = logs.new("sorters")
local comparers = { }
local splitters = { }
-local entries = { }
-local mappings = { }
-local replacements = { }
+local entries = allocate()
+local mappings = allocate()
+local replacements = allocate()
local ignoredoffset = 0x10000
local replacementoffset = 0x10000
local digitsoffset = 0x20000
diff --git a/tex/context/base/sort-lan.lua b/tex/context/base/sort-lan.lua
index 31824c964..322af2598 100644
--- a/tex/context/base/sort-lan.lua
+++ b/tex/context/base/sort-lan.lua
@@ -6,10 +6,12 @@ if not modules then modules = { } end modules ['sort-lan'] = {
license = "see context related readme files"
}
--- this is a rather preliminary and incomplete file
--- maybe we should load this kind of stuff runtime
-
--- replacements are indexed as they need to be applied in sequence
+-- Many vectors were supplied by Wolfgang Schuster and Philipp
+-- Gesang.
+--
+-- Replacements are indexed as they need to be applied in sequence
+--
+-- Maybe we should load these tables runtime, just like patterns.
local utf = unicode.utf8
local uc = utf.char
@@ -74,112 +76,112 @@ replacements['nl'] = { { "ij", 'y' }, { "IJ", 'Y' } }
entries ['nl'] = entries ['en']
mappings ['nl'] = mappings['en']
--- czech
-
-local cz_ch = uc(replacementoffset + 1)
-local cz_CH = uc(replacementoffset + 2)
-
-replacements['cz'] = {
- [1] = { "ch", cz_ch }
-}
-
-entries['cz'] = {
- ['a'] = "a", -- a
- [uc(0x00E1)] = "a", -- aacute
- ['b'] = "b", -- b
- ['c'] = "c", -- c
- [uc(0x010D)] = uc(0x010D), -- ccaron
- ['d'] = "d", -- d
- [uc(0x010F)] = "d", -- dcaron
- ['e'] = "e", -- e
- [uc(0x00E9)] = "e", -- eacute
- [uc(0x011B)] = "e", -- ecaron
- ['f'] = "f", -- f
- ['g'] = "g", -- g
- ['h'] = "h", -- h
- [cz_ch] = "ch", -- ch
- ['i'] = "i", -- i
- [uc(0x00ED)] = "i", -- iacute
- ['j'] = "j", -- j
- ['k'] = "k", -- k
- ['l'] = "l", -- l
- ['m'] = "m", -- m
- ['n'] = "n", -- n
- ['ň'] = "n", -- ncaron
- ['o'] = "o", -- o
- ['p'] = "p", -- p
- ['q'] = "q", -- q
- ['r'] = "r", -- r
- ['ř'] = "ř", -- rcaron
- ['s'] = "s", -- s
- [uc(0x0161)] = uc(0x0161), -- scaron
- ['t'] = "t", -- t
- [uc(0x0165)] = "t", -- tcaron
- ['u'] = "u", -- u
- [uc(0x00FA)] = "u", -- uacute
- [uc(0x016F)] = "u", -- uring
- ['v'] = "v", -- v
- ['w'] = "w", -- w
- ['x'] = "x", -- x
- ['y'] = "y", -- y
- [uc(0x00FD)] = "y", -- yacute
- ['z'] = "z", -- z
- [uc(0x017E)] = uc(0x017E), -- zcaron
-}
-
-mappings['cz'] = {
- ['a'] = 1, -- a
- [uc(0x00E1)] = 3, -- aacute
- ['b'] = 5, -- b
- ['c'] = 7, -- c
- [uc(0x010D)] = 9, -- ccaron
- ['d'] = 11, -- d
- [uc(0x010F)] = 13, -- dcaron
- ['e'] = 15, -- e
- [uc(0x00E9)] = 17, -- eacute
- [uc(0x011B)] = 19, -- ecaron
- ['f'] = 21, -- f
- ['g'] = 23, -- g
- ['h'] = 25, -- h
- [cz_ch] = 27, -- ch
- ['i'] = 29, -- i
- [uc(0x00ED)] = 31, -- iacute
- ['j'] = 33, -- j
- ['k'] = 35, -- k
- ['l'] = 37, -- l
- ['m'] = 39, -- m
- ['n'] = 41, -- n
- ['ň'] = 43, -- ncaron
- ['o'] = 45, -- o
- ['p'] = 47, -- p
- ['q'] = 49, -- q
- ['r'] = 51, -- r
- ['ř'] = 53, -- rcaron
- ['s'] = 55, -- s
- [uc(0x0161)] = 57, -- scaron
- ['t'] = 59, -- t
- [uc(0x0165)] = 61, -- tcaron
- ['u'] = 63, -- u
- [uc(0x00FA)] = 65, -- uacute
- [uc(0x016F)] = 67, -- uring
- ['v'] = 69, -- v
- ['w'] = 71, -- w
- ['x'] = 73, -- x
- ['y'] = 75, -- y
- [uc(0x00FD)] = 77, -- yacute
- ['z'] = 79, -- z
- [uc(0x017E)] = 81, -- zcaron
-}
-
-adduppercaseentries ("cz")
-adduppercasemappings("cz") -- 1 can be option (but then we need a runtime variant)
-
-entries ['cz'][cz_CH] = entries ['cz'][cz_ch]
-mappings['cz'][cz_CH] = mappings['cz'][cz_ch]
-
-replacements['cs'] = replacements['cz']
-entries ['cs'] = entries ['cz']
-mappings ['cs'] = mappings ['cz']
+--~ -- czech (defined later)
+--~
+--~ local cz_ch = uc(replacementoffset + 1)
+--~ local cz_CH = uc(replacementoffset + 2)
+--~
+--~ replacements['cz'] = {
+--~ [1] = { "ch", cz_ch }
+--~ }
+--~
+--~ entries['cz'] = {
+--~ ['a'] = "a", -- a
+--~ [uc(0x00E1)] = "a", -- aacute
+--~ ['b'] = "b", -- b
+--~ ['c'] = "c", -- c
+--~ [uc(0x010D)] = uc(0x010D), -- ccaron
+--~ ['d'] = "d", -- d
+--~ [uc(0x010F)] = "d", -- dcaron
+--~ ['e'] = "e", -- e
+--~ [uc(0x00E9)] = "e", -- eacute
+--~ [uc(0x011B)] = "e", -- ecaron
+--~ ['f'] = "f", -- f
+--~ ['g'] = "g", -- g
+--~ ['h'] = "h", -- h
+--~ [cz_ch] = "ch", -- ch
+--~ ['i'] = "i", -- i
+--~ [uc(0x00ED)] = "i", -- iacute
+--~ ['j'] = "j", -- j
+--~ ['k'] = "k", -- k
+--~ ['l'] = "l", -- l
+--~ ['m'] = "m", -- m
+--~ ['n'] = "n", -- n
+--~ ['ň'] = "n", -- ncaron
+--~ ['o'] = "o", -- o
+--~ ['p'] = "p", -- p
+--~ ['q'] = "q", -- q
+--~ ['r'] = "r", -- r
+--~ ['ř'] = "ř", -- rcaron
+--~ ['s'] = "s", -- s
+--~ [uc(0x0161)] = uc(0x0161), -- scaron
+--~ ['t'] = "t", -- t
+--~ [uc(0x0165)] = "t", -- tcaron
+--~ ['u'] = "u", -- u
+--~ [uc(0x00FA)] = "u", -- uacute
+--~ [uc(0x016F)] = "u", -- uring
+--~ ['v'] = "v", -- v
+--~ ['w'] = "w", -- w
+--~ ['x'] = "x", -- x
+--~ ['y'] = "y", -- y
+--~ [uc(0x00FD)] = "y", -- yacute
+--~ ['z'] = "z", -- z
+--~ [uc(0x017E)] = uc(0x017E), -- zcaron
+--~ }
+--~
+--~ mappings['cz'] = {
+--~ ['a'] = 1, -- a
+--~ [uc(0x00E1)] = 3, -- aacute
+--~ ['b'] = 5, -- b
+--~ ['c'] = 7, -- c
+--~ [uc(0x010D)] = 9, -- ccaron
+--~ ['d'] = 11, -- d
+--~ [uc(0x010F)] = 13, -- dcaron
+--~ ['e'] = 15, -- e
+--~ [uc(0x00E9)] = 17, -- eacute
+--~ [uc(0x011B)] = 19, -- ecaron
+--~ ['f'] = 21, -- f
+--~ ['g'] = 23, -- g
+--~ ['h'] = 25, -- h
+--~ [cz_ch] = 27, -- ch
+--~ ['i'] = 29, -- i
+--~ [uc(0x00ED)] = 31, -- iacute
+--~ ['j'] = 33, -- j
+--~ ['k'] = 35, -- k
+--~ ['l'] = 37, -- l
+--~ ['m'] = 39, -- m
+--~ ['n'] = 41, -- n
+--~ ['ň'] = 43, -- ncaron
+--~ ['o'] = 45, -- o
+--~ ['p'] = 47, -- p
+--~ ['q'] = 49, -- q
+--~ ['r'] = 51, -- r
+--~ ['ř'] = 53, -- rcaron
+--~ ['s'] = 55, -- s
+--~ [uc(0x0161)] = 57, -- scaron
+--~ ['t'] = 59, -- t
+--~ [uc(0x0165)] = 61, -- tcaron
+--~ ['u'] = 63, -- u
+--~ [uc(0x00FA)] = 65, -- uacute
+--~ [uc(0x016F)] = 67, -- uring
+--~ ['v'] = 69, -- v
+--~ ['w'] = 71, -- w
+--~ ['x'] = 73, -- x
+--~ ['y'] = 75, -- y
+--~ [uc(0x00FD)] = 77, -- yacute
+--~ ['z'] = 79, -- z
+--~ [uc(0x017E)] = 81, -- zcaron
+--~ }
+--~
+--~ adduppercaseentries ("cz")
+--~ adduppercasemappings("cz") -- 1 can be option (but then we need a runtime variant)
+--~
+--~ entries ['cz'][cz_CH] = entries ['cz'][cz_ch]
+--~ mappings['cz'][cz_CH] = mappings['cz'][cz_ch]
+--~
+--~ replacements['cs'] = replacements['cz']
+--~ entries ['cs'] = entries ['cz']
+--~ mappings ['cs'] = mappings ['cz']
--~ print(table.serialize(mappings.cs))
@@ -320,11 +322,310 @@ mappings['sl'] = {
adduppercaseentries ("sl")
adduppercasemappings("sl") -- cf. MM
-sorters.replacements["pl"] = {
- -- no replacements
+-- The following (quite some) languages were provided by Philipp
+-- Gesang (Phg), megas.kapaneus@gmail.com.
+
+replacements["ru"] = { --[[ None, do you miss any? ]] }
+
+entries["ru"] = {
+ ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["д"] = "д",
+ ["е"] = "е", ["ё"] = "е", ["ж"] = "ж", ["з"] = "з", ["и"] = "и",
+ ["і"] = "и", ["й"] = "й", ["к"] = "к", ["л"] = "л", ["м"] = "м",
+ ["н"] = "н", ["о"] = "о", ["п"] = "п", ["р"] = "р", ["с"] = "с",
+ ["т"] = "т", ["у"] = "у", ["ф"] = "ф", ["х"] = "х", ["ц"] = "ц",
+ ["ч"] = "ч", ["ш"] = "ш", ["щ"] = "щ", ["ъ"] = "ъ", ["ы"] = "ы",
+ ["ь"] = "ь", ["ѣ"] = "ѣ", ["э"] = "э", ["ю"] = "ю", ["я"] = "я",
+ ["ѳ"] = "ѳ", ["ѵ"] = "ѵ",
+}
+
+mappings["ru"] = {
+ ["а"] = 1, ["б"] = 2, ["в"] = 3, ["г"] = 4, ["д"] = 5,
+ ["е"] = 6, ["ё"] = 6, ["ж"] = 7, ["з"] = 8, ["и"] = 9,
+ ["і"] = 9, ["й"] = 10, ["к"] = 11, ["л"] = 12, ["м"] = 13,
+ ["н"] = 14, ["о"] = 15, ["п"] = 16, ["р"] = 17, ["с"] = 18,
+ ["т"] = 19, ["у"] = 20, ["ф"] = 21, ["х"] = 22, ["ц"] = 23,
+ ["ч"] = 24, ["ш"] = 25, ["щ"] = 26, ["ъ"] = 27, ["ы"] = 28,
+ ["ь"] = 29, ["ѣ"] = 30, ["э"] = 31, ["ю"] = 32, ["я"] = 33,
+ ["ѳ"] = 34, ["ѵ"] = 35,
+}
+
+adduppercaseentries ("ru")
+adduppercasemappings("ru")
+
+--- Basic Ukrainian
+
+replacements["uk"] = { --[[ None, do you miss any? ]] }
+
+entries["uk"] = {
+ ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["ґ"] = "ґ",
+ ["д"] = "д", ["е"] = "е", ["є"] = "є", ["ж"] = "ж", ["з"] = "з",
+ ["и"] = "и", ["і"] = "і", ["ї"] = "ї", ["й"] = "й", ["к"] = "к",
+ ["л"] = "л", ["м"] = "м", ["н"] = "н", ["о"] = "о", ["п"] = "п",
+ ["р"] = "р", ["с"] = "с", ["т"] = "т", ["у"] = "у", ["ф"] = "ф",
+ ["х"] = "х", ["ц"] = "ц", ["ч"] = "ч", ["ш"] = "ш", ["щ"] = "щ",
+ ["ь"] = "ь", ["ю"] = "ю", ["я"] = "я",
+}
+
+mappings["uk"] = {
+ ["а"] = 1, ["б"] = 2, ["в"] = 3, ["г"] = 4, ["ґ"] = 5,
+ ["д"] = 6, ["е"] = 7, ["є"] = 8, ["ж"] = 9, ["з"] = 10,
+ ["и"] = 11, ["і"] = 12, ["ї"] = 13, ["й"] = 14, ["к"] = 15,
+ ["л"] = 16, ["м"] = 17, ["н"] = 18, ["о"] = 19, ["п"] = 20,
+ ["р"] = 21, ["с"] = 22, ["т"] = 23, ["у"] = 24, ["ф"] = 25,
+ ["х"] = 26, ["ц"] = 27, ["ч"] = 28, ["ш"] = 29, ["щ"] = 30,
+ ["ь"] = 31, ["ю"] = 32, ["я"] = 33,
+}
+
+adduppercaseentries ("uk")
+adduppercasemappings("uk")
+
+--- Belarusian
+
+replacements["be"] = { --[[ None, do you miss any? ]] }
+
+entries["be"] = {
+ ["а"] = "а", ["б"] = "б", ["в"] = "в", ["г"] = "г", ["д"] = "д",
+ ["е"] = "е", ["ё"] = "е", ["ж"] = "ж", ["з"] = "з", ["і"] = "і",
+ ["й"] = "й", ["к"] = "к", ["л"] = "л", ["м"] = "м", ["н"] = "н",
+ ["о"] = "о", ["п"] = "п", ["р"] = "р", ["с"] = "с", ["т"] = "т",
+ ["у"] = "у", ["ў"] = "ў", ["ф"] = "ф", ["х"] = "х", ["ц"] = "ц",
+ ["ч"] = "ч", ["ш"] = "ш", ["ы"] = "ы", ["ь"] = "ь", ["э"] = "э",
+ ["ю"] = "ю", ["я"] = "я",
+}
+
+mappings["be"] = {
+ ["а"] = 1, ["б"] = 2, ["в"] = 3, ["г"] = 4, ["д"] = 5,
+ ["е"] = 6, ["ё"] = 6, ["ж"] = 7, ["з"] = 8, ["і"] = 9,
+ ["й"] = 10, ["к"] = 11, ["л"] = 12, ["м"] = 13, ["н"] = 14,
+ ["о"] = 15, ["п"] = 16, ["р"] = 17, ["с"] = 18, ["т"] = 19,
+ ["у"] = 20, ["ў"] = 21, ["ф"] = 22, ["х"] = 23, ["ц"] = 24,
+ ["ч"] = 25, ["ш"] = 26, ["ы"] = 27, ["ь"] = 28, ["э"] = 29,
+ ["ю"] = 30, ["я"] = 31,
}
-sorters.entries["pl"] = {
+adduppercaseentries ("be")
+adduppercasemappings("be")
+
+--- Bulgarian
+
+replacements["bg"] = { --[[ None, do you miss any? ]] }
+
+entries["bg"] = {
+ ["а"] = "а",
+ ["б"] = "б",
+ ["в"] = "в",
+ ["г"] = "г",
+ ["д"] = "д",
+ ["е"] = "е",
+ ["ж"] = "ж",
+ ["з"] = "з",
+ ["и"] = "и",
+ ["й"] = "й",
+ ["к"] = "к",
+ ["a"] = "a",
+ ["л"] = "л",
+ ["a"] = "a",
+ ["м"] = "м",
+ ["н"] = "н",
+ ["о"] = "о",
+ ["п"] = "п",
+ ["р"] = "р",
+ ["с"] = "с",
+ ["т"] = "т",
+ ["у"] = "у",
+ ["ф"] = "ф",
+ ["х"] = "х",
+ ["ц"] = "ц",
+ ["ч"] = "ч",
+ ["ш"] = "ш",
+ ["щ"] = "щ",
+ ["ъ"] = "ъ",
+ ["ь"] = "ь",
+ ["ю"] = "ю",
+ ["я"] = "я",
+}
+
+mappings["bg"] = {
+ ["а"] = 1,
+ ["б"] = 2,
+ ["в"] = 3,
+ ["г"] = 4,
+ ["д"] = 5,
+ ["е"] = 6,
+ ["ж"] = 7,
+ ["з"] = 8,
+ ["и"] = 9,
+ ["й"] = 10,
+ ["к"] = 11,
+ ["a"] = 12,
+ ["л"] = 13,
+ ["a"] = 14,
+ ["м"] = 15,
+ ["н"] = 16,
+ ["о"] = 17,
+ ["п"] = 18,
+ ["р"] = 19,
+ ["с"] = 20,
+ ["т"] = 21,
+ ["у"] = 22,
+ ["ф"] = 23,
+ ["х"] = 24,
+ ["ц"] = 25,
+ ["ч"] = 26,
+ ["ш"] = 27,
+ ["щ"] = 28,
+ ["ъ"] = 29,
+ ["ь"] = 30,
+ ["ю"] = 31,
+ ["я"] = 32,
+}
+
+adduppercaseentries ("bg")
+adduppercasemappings("bg")
+
+--- Old Church Slavonic
+
+-- The language symbol “cu” is taken from the Wikipedia subdomain
+-- cu.wikipedia.org.
+
+local cu_uk = uc(replacementoffset + 1)
+local cu_UK = uc(replacementoffset + 2)
+
+replacements["cu"] = {
+ [1] = { "оу", cu_uk },
+}
+
+entries["cu"] = {
+ ["а"] = "а",
+ ["б"] = "б",
+ ["в"] = "в",
+ ["г"] = "г",
+ ["д"] = "д",
+ ["є"] = "є",
+ ["ж"] = "ж",
+ ["ѕ"] = "ѕ",
+ ["ꙃ"] = "ѕ", -- Dzělo, U+0292, alternative: dz U+01f3
+ ["з"] = "з",
+ ["ꙁ"] = "з", -- Zemlja
+ ["и"] = "и",
+ ["і"] = "и",
+ ["ї"] = "и",
+ ["ћ"] = "ћ",
+ ["к"] = "к",
+ ["л"] = "л",
+ ["м"] = "м",
+ ["н"] = "н",
+ ["о"] = "о",
+ ["п"] = "п",
+ ["р"] = "р",
+ ["с"] = "с",
+ ["т"] = "т",
+ ["у"] = "у",
+ ["ѹ"] = "у", -- U+0478 uk, horizontal ligature
+ ["ꙋ"] = "у", -- U+0479 uk, vertical ligature
+ [cu_uk] = "у",
+ ["ф"] = "ф",
+ ["х"] = "х",
+ ["ѡ"] = "ѡ", --"ō"
+ ["ѿ"] = "ѡ", -- U+047f \
+ ["ѽ"] = "ѡ", -- U+047d > Omega variants
+ ["ꙍ"] = "ѡ", -- U+064D /
+ ["ц"] = "ц",
+ ["ч"] = "ч",
+ ["ш"] = "ш",
+ ["щ"] = "щ",
+ ["ъ"] = "ъ",
+ ["ы"] = "ы",
+ ["ꙑ"] = "ы", -- Old jery (U+a651) as used e.g. by the OCS Wikipedia.
+ ["ь"] = "ь",
+ ["ѣ"] = "ѣ",
+ ["ю"] = "ю",
+ ["ꙗ"] = "ꙗ", -- IOTIFIED A
+ ["ѥ"] = "ѥ",
+ ["ѧ"] = "ѧ",
+ ["ѩ"] = "ѩ",
+ ["ѫ"] = "ѫ",
+ ["ѭ"] = "ѭ",
+ ["ѯ"] = "ѯ",
+ ["ѱ"] = "ѱ",
+ ["ѳ"] = "ѳ",
+ ["ѵ"] = "ѵ",
+ ["ѷ"] = "ѵ", -- Why does this even have its own codepoint????
+}
+
+mappings["cu"] = {
+ ["а"] = 1,
+ ["б"] = 2,
+ ["в"] = 3,
+ ["г"] = 4,
+ ["д"] = 5,
+ ["є"] = 6,
+ ["ж"] = 7,
+ ["ѕ"] = 8,
+ ["ꙃ"] = 8, -- Dzělo, U+0292, alternative: dz U+01f3
+ ["з"] = 9,
+ ["ꙁ"] = 9, -- Zemlja
+ ["и"] = 10,
+ ["і"] = 10,
+ ["ї"] = 10,
+ ["ћ"] = 11,
+ ["к"] = 12,
+ ["л"] = 13,
+ ["м"] = 14,
+ ["н"] = 15,
+ ["о"] = 16,
+ ["п"] = 17,
+ ["р"] = 18,
+ ["с"] = 19,
+ ["т"] = 20,
+ ["у"] = 21,
+ ["ѹ"] = 21, -- U+0478 uk, horizontal ligature
+ ["ꙋ"] = 21, -- U+0479 uk, vertical ligature
+ [cu_uk] = 21,
+ ["ф"] = 22,
+ ["х"] = 23,
+ ["ѡ"] = 24, --"ō"
+ ["ѿ"] = 24, -- U+047f \
+ ["ѽ"] = 24, -- U+047d > Omega variants
+ ["ꙍ"] = 24, -- U+064D /
+ ["ц"] = 25,
+ ["ч"] = 26,
+ ["ш"] = 27,
+ ["щ"] = 28,
+ ["ъ"] = 29,
+ ["ы"] = 30,
+ ["ꙑ"] = 30, -- Old jery (U+a651) as used e.g. by the OCS Wikipedia.
+ ["ь"] = 31,
+ ["ѣ"] = 32,
+ ["ю"] = 33,
+ ["ꙗ"] = 34, -- IOTIFIED A
+ ["ѥ"] = 35,
+ ["ѧ"] = 36,
+ ["ѩ"] = 37,
+ ["ѫ"] = 38,
+ ["ѭ"] = 39,
+ ["ѯ"] = 40,
+ ["ѱ"] = 41,
+ ["ѳ"] = 42,
+ ["ѵ"] = 43,
+ ["ѷ"] = 43, -- Why does this even have its own codepoint????
+}
+
+adduppercaseentries ("cu")
+adduppercasemappings("cu")
+
+entries ["cu"] [cu_UK] = entries ["cu"] [cu_uk]
+mappings["cu"] [cu_UK] = mappings["cu"] [cu_uk]
+
+--- Polish (including the letters q, v, x)
+
+-- Cf. ftp://ftp.gust.org.pl/pub/GUST/bulletin/03/02-bl.pdf.
+
+replacements["pl"] = {
+ -- none
+}
+
+entries["pl"] = {
["a"] = "a", ["ą"] = "ą", ["b"] = "b", ["c"] = "c", ["ć"] = "ć",
["d"] = "d", ["e"] = "e", ["ę"] = "ę", ["f"] = "f", ["g"] = "g",
["h"] = "h", ["i"] = "i", ["j"] = "j", ["k"] = "k", ["l"] = "l",
@@ -334,7 +635,7 @@ sorters.entries["pl"] = {
["x"] = "x", ["y"] = "y", ["z"] = "z", ["ź"] = "ź", ["ż"] = "ż",
}
-sorters.mappings["pl"] = {
+mappings["pl"] = {
["a"] = 1, ["ą"] = 2, ["b"] = 3, ["c"] = 4, ["ć"] = 5,
["d"] = 6, ["e"] = 7, ["ę"] = 8, ["f"] = 9, ["g"] = 10,
["h"] = 11, ["i"] = 12, ["j"] = 13, ["k"] = 14, ["l"] = 15,
@@ -344,5 +645,2049 @@ sorters.mappings["pl"] = {
["x"] = 31, ["y"] = 32, ["z"] = 33, ["ź"] = 34, ["ż"] = 35,
}
-adduppercaseentries ('pl')
-adduppercasemappings('pl',1)
+adduppercaseentries ("pl")
+adduppercasemappings("pl")
+
+--- Czech
+-- Modified to treat quantities and other secondary characteristics indifferently.
+-- Cf. <http://racek.vlada.cz/usneseni/usneseni_webtest.nsf/WebGovRes/0AD8FEF4CC04B7A4C12571B6006D69D0?OpenDocument>
+-- (2.4.3; via <http://cs.wikipedia.org/wiki/Abecední_řazení#.C4.8Ce.C5.A1tina>).
+
+local cz_ch = uc(replacementoffset + 1)
+local cz_CH = uc(replacementoffset + 2) -- Is this actually used somewhere (e.g. with “adduppercaseentries”)?
+
+replacements["cz"] = {
+ [1] = { "ch", cz_ch }
+}
+
+entries["cz"] = {
+ ["a"] = "a", -- a
+ ["á"] = "a", -- aacute
+ ["b"] = "b", -- b
+ ["c"] = "c", -- c
+ ["č"] = "č", -- ccaron
+ ["d"] = "d", -- d
+ ["ď"] = "d", -- dcaron
+ ["e"] = "e", -- e
+ ["é"] = "e", -- eacute
+ ["ě"] = "e", -- ecaron
+ ["f"] = "f", -- f
+ ["g"] = "g", -- g
+ ["h"] = "h", -- h
+ [cz_ch] = "ch", -- ch
+ ["i"] = "i", -- i
+ ["í"] = "i", -- iacute
+ ["j"] = "j", -- j
+ ["k"] = "k", -- k
+ ["l"] = "l", -- l
+ ["m"] = "m", -- m
+ ["n"] = "n", -- n
+ ["ň"] = "n", -- ncaron
+ ["o"] = "o", -- o
+ ["ó"] = "o", -- ó
+ ["p"] = "p", -- p
+ ["q"] = "q", -- q
+ ["r"] = "r", -- r
+ ["ř"] = "ř", -- rcaron
+ ["s"] = "s", -- s
+ ["š"] = "š", -- scaron
+ ["t"] = "t", -- t
+ ["ť"] = "t", -- tcaron
+ ["u"] = "u", -- u
+ ["ú"] = "u", -- uacute
+ ["ů"] = "u", -- uring
+ ["v"] = "v", -- v
+ ["w"] = "w", -- w
+ ["x"] = "x", -- x
+ ["y"] = "y", -- y
+ ["ý"] = "y", -- yacute
+ ["z"] = "z", -- z
+ ["ž"] = "ž", -- zcaron
+}
+
+mappings["cz"] = {
+ ["a"] = 1, -- a
+ ["á"] = 1, -- aacute -> a
+ ["b"] = 2, -- b
+ ["c"] = 3, -- c
+ ["č"] = 4, -- ccaron
+ ["d"] = 5, -- d
+ ["ď"] = 5, -- dcaron -> ď
+ ["e"] = 6, -- e
+ ["é"] = 6, -- eacute -> e
+ ["ě"] = 6, -- ecaron -> e
+ ["f"] = 7, -- f
+ ["g"] = 8, -- g
+ ["h"] = 9, -- h
+ [cz_ch] = 10, -- ch
+ ["i"] = 11, -- i
+ ["í"] = 11, -- iacute -> i
+ ["j"] = 12, -- j
+ ["k"] = 13, -- k
+ ["l"] = 14, -- l
+ ["m"] = 15, -- m
+ ["n"] = 16, -- n
+ ["ň"] = 16, -- ncaron -> n
+ ["o"] = 17, -- o
+ ["ó"] = 17, -- o -> o
+ ["p"] = 18, -- p
+ ["q"] = 19, -- q
+ ["r"] = 20, -- r
+ ["ř"] = 21, -- rcaron
+ ["s"] = 22, -- s
+ ["š"] = 23, -- scaron
+ ["t"] = 24, -- t
+ ["ť"] = 24, -- tcaron -> t
+ ["u"] = 25, -- u
+ ["ú"] = 25, -- uacute -> u
+ ["ů"] = 25, -- uring -> u
+ ["v"] = 26, -- v
+ ["w"] = 27, -- w
+ ["x"] = 28, -- x
+ ["y"] = 29, -- y
+ ["ý"] = 29, -- yacute -> y
+ ["z"] = 30, -- z
+ ["ž"] = 31, -- zcaron Checksum: 42
+}
+
+adduppercaseentries ("cz")
+adduppercasemappings("cz") -- 1 can be option (but then we need a runtime variant)
+
+entries ["cz"][cz_CH] = entries ["cz"][cz_ch]
+mappings["cz"][cz_CH] = mappings["cz"][cz_ch]
+
+replacements["cs"] = replacements["cz"]
+entries ["cs"] = entries ["cz"]
+mappings ["cs"] = mappings ["cz"]
+
+--- Slovak.
+
+-- Vowel and consonant quantities, "ď", "ľ", "ň", "ť", "ô", and "ä" are treated
+-- indifferently as their base character, as in my dictionary. If you prefer them
+-- to affect collation order, then use the values given in the comments. We could
+-- define an additional vector for that.
+
+local sk_dz = uc(replacementoffset + 1)
+local sk_DZ = uc(replacementoffset + 2)
+local sk_dzh = uc(replacementoffset + 3)
+local sk_DZH = uc(replacementoffset + 4)
+local sk_ch = uc(replacementoffset + 5)
+local sk_CH = uc(replacementoffset + 6)
+
+replacements["sk"] = {
+ [1] = { "dz", sk_dz },
+ [2] = { "dž", sk_dzh },
+ [3] = { "ch", sk_ch },
+}
+
+entries["sk"] = {
+ ["a"] = "a",
+ ["á"] = "a", -- "á",
+ ["ä"] = "a", -- "ä",
+ ["b"] = "b",
+ ["c"] = "c",
+ ["č"] = "č",
+ ["d"] = "d",
+ ["ď"] = "d", -- "ď",
+ [sk_dz] = "dz",
+ [sk_dzh] = "dž",
+ ["e"] = "e",
+ ["é"] = "e", -- "é",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["h"] = "h",
+ [sk_ch] = "ch",
+ ["i"] = "i",
+ ["í"] = "i", -- "í",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["ĺ"] = "l", -- "ĺ",
+ ["ľ"] = "l", -- "ľ",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["ň"] = "n", -- "ň",
+ ["o"] = "o",
+ ["ó"] = "o", -- "ó",
+ ["ô"] = "o", -- "ô",
+ ["p"] = "p",
+ ["q"] = "q",
+ ["r"] = "r",
+ ["ŕ"] = "r", -- "ŕ",
+ ["s"] = "s",
+ ["š"] = "š",
+ ["t"] = "t",
+ ["ť"] = "t", -- "ť",
+ ["u"] = "u",
+ ["ú"] = "u", -- "ú",
+ ["v"] = "v",
+ ["w"] = "w",
+ ["x"] = "x",
+ ["y"] = "y",
+ ["ý"] = "y", -- "ý",
+ ["z"] = "z",
+ ["ž"] = "ž",
+}
+
+mappings["sk"] = {
+ ["a"] = 1,
+ ["á"] = 1, -- 2,
+ ["ä"] = 1, -- 3,
+ ["b"] = 4,
+ ["c"] = 5,
+ ["č"] = 6,
+ ["d"] = 7,
+ ["ď"] = 7, -- 8,
+ [sk_dz] = 9,
+ [sk_dzh] = 10,
+ ["e"] = 11,
+ ["é"] = 11, -- 12,
+ ["f"] = 13,
+ ["g"] = 14,
+ ["h"] = 15,
+ [sk_ch] = 16,
+ ["i"] = 17,
+ ["í"] = 17, -- 18,
+ ["j"] = 19,
+ ["k"] = 20,
+ ["l"] = 21,
+ ["ĺ"] = 21, -- 22,
+ ["ľ"] = 21, -- 23,
+ ["m"] = 24,
+ ["n"] = 25,
+ ["ň"] = 25, -- 26,
+ ["o"] = 27,
+ ["ó"] = 27, -- 28,
+ ["ô"] = 27, -- 29,
+ ["p"] = 30,
+ ["q"] = 31,
+ ["r"] = 32,
+ ["ŕ"] = 32, -- 33,
+ ["s"] = 34,
+ ["š"] = 35,
+ ["t"] = 36,
+ ["ť"] = 36, -- 37,
+ ["u"] = 38,
+ ["ú"] = 38, -- 39,
+ ["v"] = 40,
+ ["w"] = 41,
+ ["x"] = 42,
+ ["y"] = 43,
+ ["ý"] = 43, -- 44,
+ ["z"] = 45,
+ ["ž"] = 46, -- Checksum: 46, přesně!
+}
+
+adduppercaseentries ("sk")
+adduppercasemappings("sk")
+
+entries ["sk"] [sk_DZ] = entries ["sk"] [sk_dz]
+mappings["sk"] [sk_DZ] = mappings["sk"] [sk_dz]
+entries ["sk"][sk_DZH] = entries ["sk"][sk_dzh]
+mappings["sk"][sk_DZH] = mappings["sk"][sk_dzh]
+entries ["sk"] [sk_CH] = entries ["sk"] [sk_ch]
+mappings["sk"] [sk_CH] = mappings["sk"] [sk_ch]
+
+--- Croatian
+
+local hr_dzh = uc(replacementoffset + 1)
+local hr_DZH = uc(replacementoffset + 2)
+local hr_lj = uc(replacementoffset + 3)
+local hr_LJ = uc(replacementoffset + 4)
+local hr_nj = uc(replacementoffset + 5)
+local hr_NJ = uc(replacementoffset + 6)
+
+replacements["hr"] = {
+ [1] = { "dž", hr_dzh },
+ [2] = { "lj", hr_lj },
+ [3] = { "nj", hr_nj },
+}
+
+entries["hr"] = {
+ ["a"] = "a", -- Why do you sometimes encounter “â” (where Old Slavonic
+ ["b"] = "b", -- has “ѣ”) and how does it collate?
+ ["c"] = "c",
+ ["č"] = "č",
+ ["ć"] = "ć",
+ ["d"] = "d",
+ [hr_dzh] = "dž",
+ ["đ"] = "đ",
+ ["e"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["l"] = "l",
+ [hr_lj] = "lj",
+ ["m"] = "m",
+ ["n"] = "n",
+ [hr_nj] = "nj",
+ ["o"] = "o",
+ ["p"] = "p",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["š"] = "š",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["v"] = "v",
+ ["z"] = "z",
+ ["ž"] = "ž",
+}
+
+mappings["hr"] = {
+ ["a"] = 1,
+ ["b"] = 2,
+ ["c"] = 3,
+ ["č"] = 4,
+ ["ć"] = 5,
+ ["d"] = 6,
+ [hr_dzh] = 7,
+ ["đ"] = 8,
+ ["e"] = 9,
+ ["f"] = 10,
+ ["g"] = 11,
+ ["h"] = 12,
+ ["i"] = 13,
+ ["j"] = 14,
+ ["k"] = 15,
+ ["l"] = 16,
+ [hr_lj] = 17,
+ ["m"] = 18,
+ ["n"] = 19,
+ [hr_nj] = 20,
+ ["o"] = 21,
+ ["p"] = 22,
+ ["r"] = 23,
+ ["s"] = 24,
+ ["š"] = 25,
+ ["t"] = 26,
+ ["u"] = 27,
+ ["v"] = 28,
+ ["z"] = 29,
+ ["ž"] = 30,
+}
+
+adduppercaseentries ("hr")
+adduppercasemappings("hr")
+
+entries ["hr"][hr_DZH] = entries ["hr"][hr_dzh]
+mappings["hr"][hr_DZH] = mappings["hr"][hr_dzh]
+entries ["hr"] [hr_LJ] = entries ["hr"] [hr_lj]
+mappings["hr"] [hr_LJ] = mappings["hr"] [hr_lj]
+entries ["hr"] [hr_NJ] = entries ["hr"] [hr_nj]
+mappings["hr"] [hr_NJ] = mappings["hr"] [hr_nj]
+
+--- Serbian
+
+replacements["sr"] = {
+ -- none
+}
+
+entries["sr"] = {
+ ["а"] = "а",
+ ["б"] = "б",
+ ["в"] = "в",
+ ["г"] = "г",
+ ["д"] = "д",
+ ["ђ"] = "ђ",
+ ["е"] = "е",
+ ["ж"] = "ж",
+ ["з"] = "з",
+ ["и"] = "и",
+ ["ј"] = "ј",
+ ["к"] = "к",
+ ["л"] = "л",
+ ["љ"] = "љ",
+ ["м"] = "м",
+ ["н"] = "н",
+ ["њ"] = "њ",
+ ["о"] = "о",
+ ["п"] = "п",
+ ["р"] = "р",
+ ["с"] = "с",
+ ["т"] = "т",
+ ["ћ"] = "ћ",
+ ["у"] = "у",
+ ["ф"] = "ф",
+ ["х"] = "х",
+ ["ц"] = "ц",
+ ["ч"] = "ч",
+ ["џ"] = "џ",
+ ["ш"] = "ш",
+}
+
+mappings["sr"] = {
+ ["а"] = 1,
+ ["б"] = 2,
+ ["в"] = 3,
+ ["г"] = 4,
+ ["д"] = 5,
+ ["ђ"] = 6,
+ ["е"] = 7,
+ ["ж"] = 8,
+ ["з"] = 9,
+ ["и"] = 10,
+ ["ј"] = 11,
+ ["к"] = 12,
+ ["л"] = 13,
+ ["љ"] = 14,
+ ["м"] = 15,
+ ["н"] = 16,
+ ["њ"] = 17,
+ ["о"] = 18,
+ ["п"] = 19,
+ ["р"] = 20,
+ ["с"] = 21,
+ ["т"] = 22,
+ ["ћ"] = 23,
+ ["у"] = 24,
+ ["ф"] = 25,
+ ["х"] = 26,
+ ["ц"] = 27,
+ ["ч"] = 28,
+ ["џ"] = 29,
+ ["ш"] = 30,
+}
+
+adduppercaseentries ("sr")
+adduppercasemappings("sr")
+
+--- Transliteration: Russian|ISO9-1995
+
+-- Keeping the same collation order as Russian (v.s.).
+-- Matches the tables from:
+-- http://bitbucket.org/phg/transliterator/src/tip/tex/context/third/transliterator/trans_tables_iso9.lua
+
+local ru_iso9_yer = uc(replacementoffset + 1)
+
+replacements["ru-iso9"] = {
+ [1] = { "''", ru_iso9_yer },
+}
+
+entries["ru-iso9"] = {
+ ["a"] = "a",
+ ["b"] = "b",
+ ["v"] = "v",
+ ["g"] = "g",
+ ["d"] = "d",
+ ["e"] = "e",
+ ["ë"] = "ë",
+ ["ž"] = "ž",
+ ["z"] = "z",
+ ["i"] = "i",
+ ["ì"] = "ì",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["o"] = "o",
+ ["p"] = "p",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["f"] = "f",
+ ["h"] = "h",
+ ["c"] = "c",
+ ["č"] = "č",
+ ["š"] = "š",
+ ["ŝ"] = "ŝ",
+ ["ʺ"] = "ʺ",
+ [ru_iso9_yer] = "ʺ",
+ ["y"] = "y",
+ ["ʹ"] = "ʹ",
+ ["'"] = "ʹ",
+ ["ě"] = "ě",
+ ["è"] = "è",
+ ["û"] = "û",
+ ["â"] = "â",
+ ["û"] = "û",
+ ["â"] = "â",
+}
+
+mappings["ru-iso9"] = {
+ ["a"] = 1,
+ ["b"] = 2,
+ ["v"] = 3,
+ ["g"] = 4,
+ ["d"] = 5,
+ ["e"] = 6,
+ ["ë"] = 6,
+ ["ž"] = 7,
+ ["z"] = 8,
+ ["i"] = 9,
+ ["ì"] = 9,
+ ["j"] = 10,
+ ["k"] = 11,
+ ["l"] = 12,
+ ["m"] = 13,
+ ["n"] = 14,
+ ["o"] = 15,
+ ["p"] = 16,
+ ["r"] = 17,
+ ["s"] = 18,
+ ["t"] = 19,
+ ["u"] = 20,
+ ["f"] = 21,
+ ["h"] = 22,
+ ["c"] = 23,
+ ["č"] = 24,
+ ["š"] = 25,
+ ["ŝ"] = 26,
+ ["ʺ"] = 27,
+ [ru_iso9_yer] = 27,
+ ["y"] = 28,
+ ["ʹ"] = 29,
+ ["'"] = 29,
+ ["ě"] = 30,
+ ["è"] = 31,
+ ["û"] = 32,
+ ["â"] = 33,
+ ["û"] = 34,
+ ["â"] = 35,
+}
+
+adduppercaseentries ("ru-iso9")
+adduppercasemappings("ru-iso9")
+
+--- Transliteration: Old Slavonic|scientific
+
+-- Matches the tables from:
+-- http://bitbucket.org/phg/transliterator/src/tip/tex/context/third/transliterator/trans_tables_scntfc.lua
+
+local ocs_scn_uk = uc(replacementoffset + 1)
+local ocs_scn_tshe = uc(replacementoffset + 2)
+local ocs_scn_sht = uc(replacementoffset + 3)
+local ocs_scn_ju = uc(replacementoffset + 4)
+local ocs_scn_ja = uc(replacementoffset + 5)
+local ocs_scn_je = uc(replacementoffset + 6)
+local ocs_scn_ijus = uc(replacementoffset + 7)
+local ocs_scn_ibigjus = uc(replacementoffset + 8)
+local ocs_scn_xi = uc(replacementoffset + 9)
+local ocs_scn_psi = uc(replacementoffset + 10)
+local ocs_scn_theta = uc(replacementoffset + 11)
+local ocs_scn_shch = uc(replacementoffset + 12)
+
+local ocs_scn_UK = uc(replacementoffset + 13)
+local ocs_scn_TSHE = uc(replacementoffset + 14)
+local ocs_scn_SHT = uc(replacementoffset + 15)
+local ocs_scn_JU = uc(replacementoffset + 16)
+local ocs_scn_JA = uc(replacementoffset + 17)
+local ocs_scn_JE = uc(replacementoffset + 18)
+local ocs_scn_IJUS = uc(replacementoffset + 19)
+local ocs_scn_IBIGJUS = uc(replacementoffset + 20)
+local ocs_scn_XI = uc(replacementoffset + 21)
+local ocs_scn_PSI = uc(replacementoffset + 22)
+local ocs_scn_THETA = uc(replacementoffset + 23)
+local ocs_scn_SHCH = uc(replacementoffset + 24)
+
+replacements["ocs-scn"] = {
+ [1] = { "ou", ocs_scn_uk },
+ [2] = { "g’", ocs_scn_tshe },
+ [3] = { "št", ocs_scn_sht },
+ [4] = { "ju", ocs_scn_ju },
+ [5] = { "ja", ocs_scn_ja },
+ [6] = { "je", ocs_scn_je },
+ [7] = { "ję", ocs_scn_ijus },
+ [8] = { "jǫ", ocs_scn_ibigjus },
+ [9] = { "ks", ocs_scn_xi },
+ [10] = { "ps", ocs_scn_psi },
+ [11] = { "th", ocs_scn_theta },
+ [12] = { "šč", ocs_scn_shch },
+}
+
+entries["ocs-scn"] = {
+ ["a"] = "a",
+ ["b"] = "b",
+ ["v"] = "v",
+ ["g"] = "g",
+ ["d"] = "d",
+ ["e"] = "e",
+ ["ž"] = "ž",
+ ["ʒ"] = "ʒ",
+ ["z"] = "z",
+ ["i"] = "i",
+ ["ï"] = "ï",
+ [ocs_scn_tshe] = "g’",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["o"] = "o",
+ ["p"] = "p",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["f"] = "f",
+ ["x"] = "x",
+ ["o"] = "o",
+ ["c"] = "c",
+ ["č"] = "č",
+ ["š"] = "š",
+ [ocs_scn_sht] = "št",
+ [ocs_scn_shch] = "šč",
+ ["ъ"] = "ъ",
+ ["y"] = "y",
+ [ocs_scn_uk] = "y",
+ ["ь"] = "ь",
+ ["ě"] = "ě",
+ [ocs_scn_ju] = "ju",
+ [ocs_scn_ja] = "ja",
+ [ocs_scn_je] = "je",
+ ["ę"] = "ę",
+ [ocs_scn_ijus] = "ję",
+ ["ǫ"] = "ǫ",
+[ocs_scn_ibigjus] = "jǫ",
+ [ocs_scn_xi] = "ks",
+ [ocs_scn_psi] = "ps",
+ [ocs_scn_theta] = "th",
+ ["ü"] = "ü",
+}
+
+mappings["ocs-scn"] = {
+ ["a"] = 1,
+ ["b"] = 2,
+ ["v"] = 3,
+ ["g"] = 4,
+ ["d"] = 5,
+ ["e"] = 6,
+ ["ž"] = 7,
+ ["ʒ"] = 8,
+ ["z"] = 9,
+ ["i"] = 10,
+ ["ï"] = 10,
+ [ocs_scn_tshe] = 11,
+ ["k"] = 12,
+ ["l"] = 13,
+ ["m"] = 14,
+ ["n"] = 15,
+ ["o"] = 16,
+ ["p"] = 17,
+ ["r"] = 18,
+ ["s"] = 19,
+ ["t"] = 20,
+ ["u"] = 21,
+ ["f"] = 22,
+ ["x"] = 23,
+ ["o"] = 24,
+ ["c"] = 25,
+ ["č"] = 26,
+ ["š"] = 27,
+ [ocs_scn_sht] = 28,
+ [ocs_scn_shch] = 28,
+ ["ъ"] = 29,
+ ["y"] = 30,
+ [ocs_scn_uk] = 30,
+ ["ь"] = 31,
+ ["ě"] = 32,
+ [ocs_scn_ju] = 33,
+ [ocs_scn_ja] = 34,
+ [ocs_scn_je] = 35,
+ ["ę"] = 36,
+ [ocs_scn_ijus] = 37,
+ ["ǫ"] = 38,
+[ocs_scn_ibigjus] = 39,
+ [ocs_scn_xi] = 40,
+ [ocs_scn_psi] = 41,
+ [ocs_scn_theta] = 42,
+ ["ü"] = 43,
+}
+
+adduppercaseentries ("ocs-scn")
+adduppercasemappings("ocs-scn")
+
+ entries["ocs-scn"][ocs_scn_UK ] = entries["ocs-scn"][ocs_scn_uk ]
+mappings["ocs-scn"][ocs_scn_UK ] = mappings["ocs-scn"][ocs_scn_uk ]
+
+ entries["ocs-scn"][ocs_scn_TSHE ] = entries["ocs-scn"][ocs_scn_tshe ]
+mappings["ocs-scn"][ocs_scn_TSHE ] = mappings["ocs-scn"][ocs_scn_tshe ]
+
+ entries["ocs-scn"][ocs_scn_SHT ] = entries["ocs-scn"][ocs_scn_sht ]
+mappings["ocs-scn"][ocs_scn_SHT ] = mappings["ocs-scn"][ocs_scn_sht ]
+
+ entries["ocs-scn"][ocs_scn_JU ] = entries["ocs-scn"][ocs_scn_ju ]
+mappings["ocs-scn"][ocs_scn_JU ] = mappings["ocs-scn"][ocs_scn_ju ]
+
+ entries["ocs-scn"][ocs_scn_JA ] = entries["ocs-scn"][ocs_scn_ja ]
+mappings["ocs-scn"][ocs_scn_JA ] = mappings["ocs-scn"][ocs_scn_ja ]
+
+ entries["ocs-scn"][ocs_scn_JE ] = entries["ocs-scn"][ocs_scn_je ]
+mappings["ocs-scn"][ocs_scn_JE ] = mappings["ocs-scn"][ocs_scn_je ]
+
+ entries["ocs-scn"][ocs_scn_IJUS ] = entries["ocs-scn"][ocs_scn_ijus ]
+mappings["ocs-scn"][ocs_scn_IJUS ] = mappings["ocs-scn"][ocs_scn_ijus ]
+
+ entries["ocs-scn"][ocs_scn_IBIGJUS] = entries["ocs-scn"][ocs_scn_ibigjus]
+mappings["ocs-scn"][ocs_scn_IBIGJUS] = mappings["ocs-scn"][ocs_scn_ibigjus]
+
+ entries["ocs-scn"][ocs_scn_XI ] = entries["ocs-scn"][ocs_scn_xi ]
+mappings["ocs-scn"][ocs_scn_XI ] = mappings["ocs-scn"][ocs_scn_xi ]
+
+ entries["ocs-scn"][ocs_scn_PSI ] = entries["ocs-scn"][ocs_scn_psi ]
+mappings["ocs-scn"][ocs_scn_PSI ] = mappings["ocs-scn"][ocs_scn_psi ]
+
+ entries["ocs-scn"][ocs_scn_THETA ] = entries["ocs-scn"][ocs_scn_theta ]
+mappings["ocs-scn"][ocs_scn_THETA ] = mappings["ocs-scn"][ocs_scn_theta ]
+
+ entries["ocs-scn"][ocs_scn_SHCH ] = entries["ocs-scn"][ocs_scn_shch ]
+mappings["ocs-scn"][ocs_scn_SHCH ] = mappings["ocs-scn"][ocs_scn_shch ]
+
+--- Norwegian (bokmål).
+
+replacements["no"] = { --[[ None, do you miss any? ]] }
+
+entries["no"] = {
+ ["a"] = "a",
+ ["b"] = "b",
+ ["c"] = "c",
+ ["d"] = "d",
+ ["e"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["o"] = "o",
+ ["p"] = "p",
+ ["q"] = "q",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["v"] = "v",
+ ["w"] = "w",
+ ["x"] = "x",
+ ["y"] = "y",
+ ["z"] = "z",
+ ["æ"] = "æ",
+ ["ø"] = "ø",
+ ["å"] = "å",
+}
+
+mappings["no"] = {
+ ["a"] = 1,
+ ["b"] = 2,
+ ["c"] = 3,
+ ["d"] = 4,
+ ["e"] = 5,
+ ["f"] = 6,
+ ["g"] = 7,
+ ["h"] = 8,
+ ["i"] = 9,
+ ["j"] = 10,
+ ["k"] = 11,
+ ["l"] = 12,
+ ["m"] = 13,
+ ["n"] = 14,
+ ["o"] = 15,
+ ["p"] = 16,
+ ["q"] = 17,
+ ["r"] = 18,
+ ["s"] = 19,
+ ["t"] = 20,
+ ["u"] = 21,
+ ["v"] = 22,
+ ["w"] = 23,
+ ["x"] = 24,
+ ["y"] = 25,
+ ["z"] = 26,
+ ["æ"] = 27,
+ ["ø"] = 28,
+ ["å"] = 29,
+}
+
+adduppercaseentries ("no")
+adduppercasemappings("no")
+
+--- Danish (-> Norwegian).
+
+replacements["da"] = { --[[ None, do you miss any? ]] }
+ entries["da"] = entries["no"]
+ mappings["da"] = mappings["no"]
+
+--- Swedish
+
+replacements["sv"] = { --[[ None, do you miss any? ]] }
+
+entries["sv"] = {
+ ["a"] = "a",
+ ["b"] = "b",
+ ["c"] = "c",
+ ["d"] = "d",
+ ["e"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["o"] = "o",
+ ["p"] = "p",
+ ["q"] = "q",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["v"] = "v",
+ ["w"] = "w",
+ ["x"] = "x",
+ ["y"] = "y",
+ ["z"] = "z",
+ ["å"] = "å",
+ ["ä"] = "ä",
+ ["ö"] = "ö",
+}
+
+mappings["sv"] = {
+ ["a"] = 1,
+ ["b"] = 2,
+ ["c"] = 3,
+ ["d"] = 4,
+ ["e"] = 5,
+ ["f"] = 6,
+ ["g"] = 7,
+ ["h"] = 8,
+ ["i"] = 9,
+ ["j"] = 10,
+ ["k"] = 11,
+ ["l"] = 12,
+ ["m"] = 13,
+ ["n"] = 14,
+ ["o"] = 15,
+ ["p"] = 16,
+ ["q"] = 17,
+ ["r"] = 18,
+ ["s"] = 19,
+ ["t"] = 20,
+ ["u"] = 21,
+ ["v"] = 22,
+ ["w"] = 23,
+ ["x"] = 24,
+ ["y"] = 25,
+ ["z"] = 26,
+ ["å"] = 27,
+ ["ä"] = 28,
+ ["ö"] = 29,
+}
+
+adduppercaseentries ("sv")
+adduppercasemappings("sv")
+
+--- Icelandic
+
+-- Treating quantities as allographs.
+
+replacements["is"] = { --[[ None, do you miss any? ]] }
+
+entries["is"] = {
+ ["a"] = "a",
+ ["á"] = "a",
+ ["b"] = "b",
+ ["d"] = "d",
+ ["ð"] = "ð",
+ ["e"] = "e",
+ ["é"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["í"] = "i",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["o"] = "o",
+ ["ó"] = "o",
+ ["p"] = "p",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["ú"] = "u",
+ ["v"] = "v",
+ ["x"] = "x",
+ ["y"] = "y",
+ ["ý"] = "y",
+ ["þ"] = "þ",
+ ["æ"] = "æ",
+ ["ö"] = "ö",
+}
+
+mappings["is"] = {
+ ["a"] = 1,
+ ["á"] = 1,
+ ["b"] = 2,
+ ["d"] = 3,
+ ["ð"] = 4,
+ ["e"] = 5,
+ ["é"] = 5,
+ ["f"] = 6,
+ ["g"] = 7,
+ ["h"] = 8,
+ ["i"] = 9,
+ ["í"] = 9,
+ ["j"] = 10,
+ ["k"] = 11,
+ ["l"] = 12,
+ ["m"] = 13,
+ ["n"] = 14,
+ ["o"] = 15,
+ ["ó"] = 15,
+ ["p"] = 16,
+ ["r"] = 17,
+ ["s"] = 18,
+ ["t"] = 19,
+ ["u"] = 20,
+ ["ú"] = 20,
+ ["v"] = 21,
+ ["x"] = 22,
+ ["y"] = 23,
+ ["ý"] = 23,
+ ["þ"] = 24,
+ ["æ"] = 25,
+ ["ö"] = 26,
+}
+
+adduppercaseentries ("is")
+adduppercasemappings("is")
+
+--- Greek
+
+replacements["gr"] = { --[[ None, do you miss any? ]] }
+
+entries["gr"] = {
+ ["α"] = "α",
+ ["ά"] = "α",
+ ["ὰ"] = "α",
+ ["ᾶ"] = "α",
+ ["ᾳ"] = "α",
+ ["ἀ"] = "α",
+ ["ἁ"] = "α",
+ ["ἄ"] = "α",
+ ["ἂ"] = "α",
+ ["ἆ"] = "α",
+ ["ἁ"] = "α",
+ ["ἅ"] = "α",
+ ["ἃ"] = "α",
+ ["ἇ"] = "α",
+ ["ᾁ"] = "α",
+ ["ᾴ"] = "α",
+ ["ᾲ"] = "α",
+ ["ᾷ"] = "α",
+ ["ᾄ"] = "α",
+ ["ᾂ"] = "α",
+ ["ᾅ"] = "α",
+ ["ᾃ"] = "α",
+ ["ᾆ"] = "α",
+ ["ᾇ"] = "α",
+ ["β"] = "β",
+ ["γ"] = "γ",
+ ["δ"] = "δ",
+ ["ε"] = "ε",
+ ["έ"] = "ε",
+ ["ὲ"] = "ε",
+ ["ἐ"] = "ε",
+ ["ἔ"] = "ε",
+ ["ἒ"] = "ε",
+ ["ἑ"] = "ε",
+ ["ἕ"] = "ε",
+ ["ἓ"] = "ε",
+ ["ζ"] = "ζ",
+ ["η"] = "η",
+ ["η"] = "η",
+ ["ή"] = "η",
+ ["ὴ"] = "η",
+ ["ῆ"] = "η",
+ ["ῃ"] = "η",
+ ["ἠ"] = "η",
+ ["ἤ"] = "η",
+ ["ἢ"] = "η",
+ ["ἦ"] = "η",
+ ["ᾐ"] = "η",
+ ["ἡ"] = "η",
+ ["ἥ"] = "η",
+ ["ἣ"] = "η",
+ ["ἧ"] = "η",
+ ["ᾑ"] = "η",
+ ["ῄ"] = "η",
+ ["ῂ"] = "η",
+ ["ῇ"] = "η",
+ ["ᾔ"] = "η",
+ ["ᾒ"] = "η",
+ ["ᾕ"] = "η",
+ ["ᾓ"] = "η",
+ ["ᾖ"] = "η",
+ ["ᾗ"] = "η",
+ ["θ"] = "θ",
+ ["ι"] = "ι",
+ ["ί"] = "ι",
+ ["ὶ"] = "ι",
+ ["ῖ"] = "ι",
+ ["ἰ"] = "ι",
+ ["ἴ"] = "ι",
+ ["ἲ"] = "ι",
+ ["ἶ"] = "ι",
+ ["ἱ"] = "ι",
+ ["ἵ"] = "ι",
+ ["ἳ"] = "ι",
+ ["ἷ"] = "ι",
+ ["ϊ"] = "ι",
+ ["ΐ"] = "ι",
+ ["ῒ"] = "ι",
+ ["ῗ"] = "ι",
+ ["κ"] = "κ",
+ ["λ"] = "λ",
+ ["μ"] = "μ",
+ ["ν"] = "ν",
+ ["ξ"] = "ξ",
+ ["ο"] = "ο",
+ ["ό"] = "ο",
+ ["ὸ"] = "ο",
+ ["ὀ"] = "ο",
+ ["ὄ"] = "ο",
+ ["ὂ"] = "ο",
+ ["ὁ"] = "ο",
+ ["ὅ"] = "ο",
+ ["ὃ"] = "ο",
+ ["π"] = "π",
+ ["ρ"] = "ρ",
+ ["ῤ"] = "ῤ",
+ ["ῥ"] = "ῥ",
+ ["σ"] = "σ",
+ ["ς"] = "ς",
+ ["τ"] = "τ",
+ ["υ"] = "υ",
+ ["ύ"] = "υ",
+ ["ὺ"] = "υ",
+ ["ῦ"] = "υ",
+ ["ὐ"] = "υ",
+ ["ὔ"] = "υ",
+ ["ὒ"] = "υ",
+ ["ὖ"] = "υ",
+ ["ὑ"] = "υ",
+ ["ὕ"] = "υ",
+ ["ὓ"] = "υ",
+ ["ὗ"] = "υ",
+ ["ϋ"] = "υ",
+ ["ΰ"] = "υ",
+ ["ῢ"] = "υ",
+ ["ῧ"] = "υ",
+ ["φ"] = "φ",
+ ["χ"] = "χ",
+ ["ψ"] = "ω",
+ ["ω"] = "ω",
+ ["ώ"] = "ω",
+ ["ὼ"] = "ω",
+ ["ῶ"] = "ω",
+ ["ῳ"] = "ω",
+ ["ὠ"] = "ω",
+ ["ὤ"] = "ω",
+ ["ὢ"] = "ω",
+ ["ὦ"] = "ω",
+ ["ᾠ"] = "ω",
+ ["ὡ"] = "ω",
+ ["ὥ"] = "ω",
+ ["ὣ"] = "ω",
+ ["ὧ"] = "ω",
+ ["ᾡ"] = "ω",
+ ["ῴ"] = "ω",
+ ["ῲ"] = "ω",
+ ["ῷ"] = "ω",
+ ["ᾤ"] = "ω",
+ ["ᾢ"] = "ω",
+ ["ᾥ"] = "ω",
+ ["ᾣ"] = "ω",
+ ["ᾦ"] = "ω",
+ ["ᾧ"] = "ω",
+}
+
+mappings["gr"] = {
+ ["α"] = 1,
+ ["ά"] = 1,
+ ["ὰ"] = 1,
+ ["ᾶ"] = 1,
+ ["ᾳ"] = 1,
+ ["ἀ"] = 1,
+ ["ἁ"] = 1,
+ ["ἄ"] = 1,
+ ["ἂ"] = 1,
+ ["ἆ"] = 1,
+ ["ἁ"] = 1,
+ ["ἅ"] = 1,
+ ["ἃ"] = 1,
+ ["ἇ"] = 1,
+ ["ᾁ"] = 1,
+ ["ᾴ"] = 1,
+ ["ᾲ"] = 1,
+ ["ᾷ"] = 1,
+ ["ᾄ"] = 1,
+ ["ᾂ"] = 1,
+ ["ᾅ"] = 1,
+ ["ᾃ"] = 1,
+ ["ᾆ"] = 1,
+ ["ᾇ"] = 1,
+ ["β"] = 2,
+ ["γ"] = 3,
+ ["δ"] = 4,
+ ["ε"] = 5,
+ ["έ"] = 5,
+ ["ὲ"] = 5,
+ ["ἐ"] = 5,
+ ["ἔ"] = 5,
+ ["ἒ"] = 5,
+ ["ἑ"] = 5,
+ ["ἕ"] = 5,
+ ["ἓ"] = 5,
+ ["ζ"] = 6,
+ ["η"] = 7,
+ ["η"] = 7,
+ ["ή"] = 7,
+ ["ὴ"] = 7,
+ ["ῆ"] = 7,
+ ["ῃ"] = 7,
+ ["ἠ"] = 7,
+ ["ἤ"] = 7,
+ ["ἢ"] = 7,
+ ["ἦ"] = 7,
+ ["ᾐ"] = 7,
+ ["ἡ"] = 7,
+ ["ἥ"] = 7,
+ ["ἣ"] = 7,
+ ["ἧ"] = 7,
+ ["ᾑ"] = 7,
+ ["ῄ"] = 7,
+ ["ῂ"] = 7,
+ ["ῇ"] = 7,
+ ["ᾔ"] = 7,
+ ["ᾒ"] = 7,
+ ["ᾕ"] = 7,
+ ["ᾓ"] = 7,
+ ["ᾖ"] = 7,
+ ["ᾗ"] = 7,
+ ["θ"] = 8,
+ ["ι"] = 9,
+ ["ί"] = 9,
+ ["ὶ"] = 9,
+ ["ῖ"] = 9,
+ ["ἰ"] = 9,
+ ["ἴ"] = 9,
+ ["ἲ"] = 9,
+ ["ἶ"] = 9,
+ ["ἱ"] = 9,
+ ["ἵ"] = 9,
+ ["ἳ"] = 9,
+ ["ἷ"] = 9,
+ ["ϊ"] = 9,
+ ["ΐ"] = 9,
+ ["ῒ"] = 9,
+ ["ῗ"] = 9,
+ ["κ"] = 10,
+ ["λ"] = 11,
+ ["μ"] = 12,
+ ["ν"] = 13,
+ ["ξ"] = 14,
+ ["ο"] = 15,
+ ["ό"] = 15,
+ ["ὸ"] = 15,
+ ["ὀ"] = 15,
+ ["ὄ"] = 15,
+ ["ὂ"] = 15,
+ ["ὁ"] = 15,
+ ["ὅ"] = 15,
+ ["ὃ"] = 15,
+ ["π"] = 16,
+ ["ρ"] = 17,
+ ["ῤ"] = 17,
+ ["ῥ"] = 17,
+ ["σ"] = 18,
+ ["ς"] = 18,
+ ["τ"] = 19,
+ ["υ"] = 20,
+ ["ύ"] = 20,
+ ["ὺ"] = 20,
+ ["ῦ"] = 20,
+ ["ὐ"] = 20,
+ ["ὔ"] = 20,
+ ["ὒ"] = 20,
+ ["ὖ"] = 20,
+ ["ὑ"] = 20,
+ ["ὕ"] = 20,
+ ["ὓ"] = 20,
+ ["ὗ"] = 20,
+ ["ϋ"] = 20,
+ ["ΰ"] = 20,
+ ["ῢ"] = 20,
+ ["ῧ"] = 20,
+ ["φ"] = 21,
+ ["χ"] = 22,
+ ["ψ"] = 23,
+ ["ω"] = 24,
+ ["ώ"] = 24,
+ ["ὼ"] = 24,
+ ["ῶ"] = 24,
+ ["ῳ"] = 24,
+ ["ὠ"] = 24,
+ ["ὤ"] = 24,
+ ["ὢ"] = 24,
+ ["ὦ"] = 24,
+ ["ᾠ"] = 24,
+ ["ὡ"] = 24,
+ ["ὥ"] = 24,
+ ["ὣ"] = 24,
+ ["ὧ"] = 24,
+ ["ᾡ"] = 24,
+ ["ῴ"] = 24,
+ ["ῲ"] = 24,
+ ["ῷ"] = 24,
+ ["ᾤ"] = 24,
+ ["ᾢ"] = 24,
+ ["ᾥ"] = 24,
+ ["ᾣ"] = 24,
+ ["ᾦ"] = 24,
+ ["ᾧ"] = 24,
+}
+
+adduppercaseentries ("gr")
+adduppercasemappings("gr")
+
+--- Latin
+
+-- Treating the post-classical fricatives “j” and “v” as “i” and “u”
+-- respectively.
+
+replacements["la"] = {
+ [1] = { "æ", "ae" },
+}
+
+entries["la"] = {
+ ["a"] = "a",
+ ["ā"] = "a",
+ ["ă"] = "a",
+ ["b"] = "b",
+ ["c"] = "c",
+ ["d"] = "d",
+ ["e"] = "e",
+ ["ē"] = "e",
+ ["ĕ"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["ī"] = "i",
+ ["ĭ"] = "i",
+ ["j"] = "i",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["o"] = "o",
+ ["ō"] = "o",
+ ["ŏ"] = "o",
+ ["p"] = "p",
+ ["q"] = "q",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["ū"] = "u",
+ ["ŭ"] = "u",
+ ["v"] = "u",
+ ["w"] = "w",
+ ["x"] = "x",
+ ["y"] = "y",
+ ["ȳ"] = "y", -- Should exist in Greek words.
+ ["y̆"] = "y", -- Should exist in Greek words.
+ ["z"] = "z",
+}
+
+mappings["la"] = {
+ ["a"] = 1,
+ ["ā"] = 1,
+ ["ă"] = 1,
+ ["b"] = 2,
+ ["c"] = 3,
+ ["d"] = 4,
+ ["e"] = 5,
+ ["ē"] = 5,
+ ["ĕ"] = 5,
+ ["f"] = 6,
+ ["g"] = 7,
+ ["h"] = 8,
+ ["i"] = 9,
+ ["ī"] = 9,
+ ["ĭ"] = 9,
+ ["j"] = 9,
+ ["k"] = 10,
+ ["l"] = 11,
+ ["m"] = 12,
+ ["n"] = 13,
+ ["o"] = 14,
+ ["ō"] = 14,
+ ["ŏ"] = 14,
+ ["p"] = 15,
+ ["q"] = 16,
+ ["r"] = 17,
+ ["s"] = 18,
+ ["t"] = 19,
+ ["u"] = 20,
+ ["ū"] = 20,
+ ["ŭ"] = 20,
+ ["v"] = 20,
+ ["w"] = 21,
+ ["x"] = 22,
+ ["y"] = 23,
+ ["ȳ"] = 23,
+ ["y̆"] = 23,
+ ["z"] = 24,
+}
+
+adduppercaseentries ("la")
+adduppercasemappings("la")
+
+--- Italian
+
+replacements["it"] = { --[[ None, do you miss any? ]] }
+
+entries["it"] = {
+ ["a"] = "a",
+ ["á"] = "a",
+ ["b"] = "b",
+ ["c"] = "c",
+ ["d"] = "d",
+ ["e"] = "e",
+ ["é"] = "e",
+ ["è"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["í"] = "i",
+ ["ì"] = "i",
+ ["j"] = "i",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["o"] = "o",
+ ["ó"] = "o",
+ ["ò"] = "o",
+ ["p"] = "p",
+ ["q"] = "q",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["ú"] = "u",
+ ["ù"] = "u",
+ ["v"] = "u",
+ ["w"] = "w",
+ ["x"] = "x",
+ ["y"] = "y",
+ ["z"] = "z",
+}
+
+mappings["it"] = {
+ ["a"] = 1,
+ ["á"] = 1,
+ ["b"] = 2,
+ ["c"] = 3,
+ ["d"] = 4,
+ ["e"] = 5,
+ ["é"] = 5,
+ ["è"] = 5,
+ ["f"] = 6,
+ ["g"] = 7,
+ ["h"] = 8,
+ ["i"] = 9,
+ ["í"] = 9,
+ ["ì"] = 9,
+ ["j"] = 10,
+ ["k"] = 11,
+ ["l"] = 12,
+ ["m"] = 13,
+ ["n"] = 14,
+ ["o"] = 15,
+ ["ó"] = 15,
+ ["ò"] = 15,
+ ["p"] = 16,
+ ["q"] = 17,
+ ["r"] = 18,
+ ["s"] = 19,
+ ["t"] = 20,
+ ["u"] = 21,
+ ["ú"] = 21,
+ ["ù"] = 21,
+ ["v"] = 22,
+ ["w"] = 23,
+ ["x"] = 24,
+ ["y"] = 25,
+ ["z"] = 26,
+}
+
+adduppercaseentries ("it")
+adduppercasemappings("it")
+
+--- Romanian
+
+
+replacements["ro"] = { --[[ None, do you miss any? ]] }
+
+entries["ro"] = {
+ ["a"] = "a",
+ ["ă"] = "ă",
+ ["â"] = "â",
+ ["b"] = "b",
+ ["c"] = "c",
+ ["d"] = "d",
+ ["e"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["î"] = "î",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["o"] = "o",
+ ["p"] = "p",
+ ["q"] = "q",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["ș"] = "ș",
+ ["t"] = "t",
+ ["ț"] = "ț",
+ ["u"] = "u",
+ ["v"] = "v",
+ ["w"] = "w",
+ ["x"] = "x",
+ ["y"] = "y",
+ ["z"] = "z",
+}
+
+mappings["ro"] = {
+ ["a"] = 1,
+ ["ă"] = 2,
+ ["â"] = 3,
+ ["b"] = 4,
+ ["c"] = 5,
+ ["d"] = 6,
+ ["e"] = 7,
+ ["f"] = 8,
+ ["g"] = 9,
+ ["h"] = 10,
+ ["i"] = 11,
+ ["î"] = 12,
+ ["j"] = 13,
+ ["k"] = 14,
+ ["l"] = 15,
+ ["m"] = 16,
+ ["n"] = 17,
+ ["o"] = 18,
+ ["p"] = 19,
+ ["q"] = 20,
+ ["r"] = 21,
+ ["s"] = 22,
+ ["ș"] = 23,
+ ["t"] = 24,
+ ["ț"] = 25,
+ ["u"] = 26,
+ ["v"] = 27,
+ ["w"] = 28,
+ ["x"] = 29,
+ ["y"] = 30,
+ ["z"] = 31,
+}
+
+adduppercaseentries ("ro")
+adduppercasemappings("ro")
+
+--- Spanish
+
+replacements["es"] = { --[[ None, do you miss any? ]] }
+
+entries["es"] = {
+ ["a"] = "a",
+ ["á"] = "a",
+ ["b"] = "b",
+ ["c"] = "c",
+ ["d"] = "d",
+ ["e"] = "e",
+ ["é"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["í"] = "i",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["ñ"] = "ñ",
+ ["o"] = "o",
+ ["ó"] = "o",
+ ["p"] = "p",
+ ["q"] = "q",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["ú"] = "u",
+ ["ü"] = "u",
+ ["v"] = "v",
+ ["w"] = "w",
+ ["x"] = "x",
+ ["y"] = "y",
+ ["z"] = "z",
+}
+
+mappings["es"] = {
+ ["a"] = 1,
+ ["á"] = 1,
+ ["b"] = 2,
+ ["c"] = 3,
+ ["d"] = 4,
+ ["e"] = 5,
+ ["é"] = 5,
+ ["f"] = 6,
+ ["g"] = 7,
+ ["h"] = 8,
+ ["i"] = 9,
+ ["í"] = 9,
+ ["j"] = 10,
+ ["k"] = 11,
+ ["l"] = 12,
+ ["m"] = 13,
+ ["n"] = 14,
+ ["ñ"] = 15,
+ ["o"] = 16,
+ ["ó"] = 16,
+ ["p"] = 17,
+ ["q"] = 18,
+ ["r"] = 19,
+ ["s"] = 20,
+ ["t"] = 21,
+ ["u"] = 22,
+ ["ú"] = 22,
+ ["ü"] = 22,
+ ["v"] = 23,
+ ["w"] = 24,
+ ["x"] = 25,
+ ["y"] = 26,
+ ["z"] = 27,
+}
+
+adduppercaseentries ("es")
+adduppercasemappings("es")
+
+--- Portuguese
+
+replacements["pt"] = { --[[ None, do you miss any? ]] }
+
+entries["pt"] = {
+ ["a"] = "a",
+ ["á"] = "a",
+ ["â"] = "a",
+ ["ã"] = "a",
+ ["à"] = "a",
+ ["b"] = "b",
+ ["c"] = "c",
+ ["ç"] = "c",
+ ["d"] = "d",
+ ["e"] = "e",
+ ["é"] = "e",
+ ["ê"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["í"] = "i",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["o"] = "o",
+ ["ó"] = "o",
+ ["ô"] = "o",
+ ["õ"] = "o",
+ ["p"] = "p",
+ ["q"] = "q",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["ú"] = "u",
+ ["ü"] = "u", -- qüinqüelíngüe
+ ["v"] = "v",
+ ["w"] = "w",
+ ["x"] = "x",
+ ["y"] = "y",
+ ["z"] = "z",
+}
+
+mappings["pt"] = {
+ ["a"] = 1,
+ ["á"] = 1,
+ ["â"] = 1,
+ ["ã"] = 1,
+ ["à"] = 1,
+ ["b"] = 2,
+ ["c"] = 3,
+ ["ç"] = 3,
+ ["d"] = 4,
+ ["e"] = 5,
+ ["é"] = 5,
+ ["ê"] = 5,
+ ["f"] = 6,
+ ["g"] = 7,
+ ["h"] = 8,
+ ["i"] = 9,
+ ["í"] = 9,
+ ["j"] = 10,
+ ["k"] = 11,
+ ["l"] = 12,
+ ["m"] = 13,
+ ["n"] = 14,
+ ["o"] = 15,
+ ["ó"] = 15,
+ ["ô"] = 15,
+ ["õ"] = 15,
+ ["p"] = 16,
+ ["q"] = 17,
+ ["r"] = 18,
+ ["s"] = 19,
+ ["t"] = 20,
+ ["u"] = 21,
+ ["ú"] = 21,
+ ["ü"] = 21,
+ ["v"] = 22,
+ ["w"] = 23,
+ ["x"] = 24,
+ ["y"] = 25,
+ ["z"] = 26,
+}
+
+adduppercaseentries ("pt")
+adduppercasemappings("pt")
+
+
+--- Lithuanian
+
+local lt_ch = uc(replacementoffset + 1)
+local lt_CH = uc(replacementoffset + 2)
+
+replacements["lt"] = {
+ [1] = { "ch", lt_ch }
+}
+
+entries["lt"] = {
+ ["a"] = "a",
+ ["ą"] = "a",
+ ["b"] = "b",
+ ["c"] = "c",
+ [lt_ch] = "c",
+ ["č"] = "č",
+ ["d"] = "d",
+ ["e"] = "e",
+ ["ę"] = "e",
+ ["ė"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["į"] = "i",
+ ["y"] = "i",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["o"] = "o",
+ ["p"] = "p",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["š"] = "š",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["ų"] = "u",
+ ["ū"] = "u",
+ ["v"] = "v",
+ ["z"] = "z",
+ ["ž"] = "ž",
+}
+
+mappings["lt"] = {
+ ["a"] = 1,
+ ["ą"] = 1,
+ ["b"] = 2,
+ ["c"] = 3,
+ [lt_ch] = 3,
+ ["č"] = 4,
+ ["d"] = 5,
+ ["e"] = 6,
+ ["ę"] = 6,
+ ["ė"] = 6,
+ ["f"] = 7,
+ ["g"] = 8,
+ ["h"] = 9,
+ ["i"] = 10,
+ ["į"] = 10,
+ ["y"] = 10,
+ ["j"] = 11,
+ ["k"] = 12,
+ ["l"] = 13,
+ ["m"] = 14,
+ ["n"] = 15,
+ ["o"] = 16,
+ ["p"] = 17,
+ ["r"] = 18,
+ ["s"] = 19,
+ ["š"] = 20,
+ ["t"] = 21,
+ ["u"] = 22,
+ ["ų"] = 22,
+ ["ū"] = 22,
+ ["v"] = 23,
+ ["z"] = 24,
+ ["ž"] = 25,
+}
+
+adduppercaseentries ("lt")
+adduppercasemappings("lt")
+
+entries ["lt"][lt_CH] = entries ["lt"][lt_ch]
+mappings["lt"][lt_CH] = mappings["lt"][lt_ch]
+
+--- Latvian
+
+replacements["lv"] = { --[[ None, do you miss any? ]] }
+
+entries["lv"] = {
+ ["a"] = "a",
+ ["ā"] = "a",
+ ["b"] = "b",
+ ["c"] = "c",
+ ["č"] = "č",
+ ["d"] = "d",
+ ["e"] = "e",
+ ["ē"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["ģ"] = "ģ",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["ī"] = "i",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["ķ"] = "ķ",
+ ["l"] = "l",
+ ["ļ"] = "ļ",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["ņ"] = "ņ",
+ ["o"] = "o",
+ ["ō"] = "o",
+ ["p"] = "p",
+ ["r"] = "r",
+ ["ŗ"] = "ŗ",
+ ["s"] = "s",
+ ["š"] = "š",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["ū"] = "u",
+ ["v"] = "v",
+ ["z"] = "z",
+ ["ž"] = "ž",
+}
+
+mappings["lv"] = {
+ ["a"] = 1,
+ ["ā"] = 1,
+ ["b"] = 2,
+ ["c"] = 3,
+ ["č"] = 4,
+ ["d"] = 5,
+ ["e"] = 6,
+ ["ē"] = 6,
+ ["f"] = 7,
+ ["g"] = 8,
+ ["ģ"] = 9,
+ ["h"] = 10,
+ ["i"] = 11,
+ ["ī"] = 11,
+ ["j"] = 12,
+ ["k"] = 13,
+ ["ķ"] = 14,
+ ["l"] = 15,
+ ["ļ"] = 16,
+ ["m"] = 17,
+ ["n"] = 18,
+ ["ņ"] = 19,
+ ["o"] = 20,
+ ["ō"] = 20,
+ ["p"] = 21,
+ ["r"] = 22,
+ ["ŗ"] = 23,
+ ["s"] = 24,
+ ["š"] = 25,
+ ["t"] = 26,
+ ["u"] = 27,
+ ["ū"] = 27,
+ ["v"] = 28,
+ ["z"] = 29,
+ ["ž"] = 30,
+}
+
+adduppercaseentries ("lv")
+adduppercasemappings("lv")
+
+--- Hungarian
+
+-- Helpful but disturbing:
+-- http://en.wikipedia.org/wiki/Hungarian_alphabet#Alphabetical_ordering_.28collation.29
+-- (In short: you'd have to analyse word-compounds to realize a correct order
+-- for sequences like “nny”, “ssz”, and “zsz”. This is left as an exercise to
+-- the reader…)
+
+local hu_cs = uc(replacementoffset + 1)
+local hu_CS = uc(replacementoffset + 2)
+
+local hu_dz = uc(replacementoffset + 3)
+local hu_DZ = uc(replacementoffset + 4)
+
+local hu_dzs = uc(replacementoffset + 5)
+local hu_DZS = uc(replacementoffset + 6)
+
+local hu_gy = uc(replacementoffset + 7)
+local hu_GY = uc(replacementoffset + 8)
+
+local hu_ly = uc(replacementoffset + 9)
+local hu_LY = uc(replacementoffset + 10)
+
+local hu_ny = uc(replacementoffset + 11)
+local hu_NY = uc(replacementoffset + 12)
+
+local hu_sz = uc(replacementoffset + 13)
+local hu_SZ = uc(replacementoffset + 14)
+
+local hu_ty = uc(replacementoffset + 15)
+local hu_TY = uc(replacementoffset + 16)
+
+local hu_zs = uc(replacementoffset + 17)
+local hu_ZS = uc(replacementoffset + 18)
+
+replacements["hu"] = {
+ [1] = { "cs", hu_cs },
+ [2] = { "dz", hu_dz },
+ [3] = { "dzs", hu_dzs },
+ [4] = { "gy", hu_gy },
+ [5] = { "ly", hu_ly },
+ [6] = { "ny", hu_ny },
+ [7] = { "sz", hu_sz },
+ [8] = { "ty", hu_ty },
+ [9] = { "zs", hu_zs },
+}
+
+entries["hu"] = {
+ ["a"] = "a",
+ ["á"] = "a",
+ ["b"] = "b",
+ ["c"] = "c",
+ [hu_cs] = "cs",
+ ["d"] = "d",
+ [hu_dz] = "dz",
+ [hu_dzs] = "dzs",
+ ["e"] = "e",
+ ["é"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ [hu_gy] = "gy",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["í"] = "i",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["l"] = "l",
+ [hu_ly] = "ly",
+ ["m"] = "m",
+ ["n"] = "n",
+ [hu_ny] = "ny",
+ ["o"] = "o",
+ ["ó"] = "o",
+ ["ö"] = "ö",
+ ["ő"] = "ö",
+ ["p"] = "p",
+ ["q"] = "q",
+ ["r"] = "r",
+ ["s"] = "s",
+ [hu_sz] = "sz",
+ ["t"] = "t",
+ [hu_ty] = "ty",
+ ["u"] = "u",
+ ["ú"] = "u",
+ ["ü"] = "ü",
+ ["ű"] = "ü",
+ ["v"] = "v",
+ ["w"] = "w",
+ ["x"] = "x",
+ ["y"] = "y",
+ ["z"] = "z",
+ [hu_zs] = "zs",
+}
+
+mappings["hu"] = {
+ ["a"] = 1,
+ ["á"] = 1, -- -> a
+ ["b"] = 2,
+ ["c"] = 3,
+ [hu_cs] = 4,
+ ["d"] = 5,
+ [hu_dz] = 6,
+ [hu_dzs] = 7,
+ ["e"] = 8,
+ ["é"] = 8, -- -> e
+ ["f"] = 9,
+ ["g"] = 10,
+ [hu_gy] = 11,
+ ["h"] = 12,
+ ["i"] = 13,
+ ["í"] = 13, -- -> i
+ ["j"] = 14,
+ ["k"] = 15,
+ ["l"] = 16,
+ [hu_ly] = 17,
+ ["m"] = 18,
+ ["n"] = 19,
+ [hu_ny] = 20,
+ ["o"] = 21,
+ ["ó"] = 21, -- -> o
+ ["ö"] = 22,
+ ["ő"] = 22, -- -> ö
+ ["p"] = 23,
+ ["q"] = 24,
+ ["r"] = 25,
+ ["s"] = 26,
+ [hu_sz] = 27,
+ ["t"] = 28,
+ [hu_ty] = 29,
+ ["u"] = 30,
+ ["ú"] = 30, -- -> u
+ ["ü"] = 31,
+ ["ű"] = 31, -- -> ü
+ ["v"] = 32,
+ ["w"] = 33,
+ ["x"] = 34,
+ ["y"] = 35,
+ ["z"] = 36,
+ [hu_zs] = 37,
+}
+
+adduppercaseentries ("hu")
+adduppercasemappings("hu")
+
+entries ["hu"] [hu_CS] = entries ["hu"] [hu_cs]
+mappings["hu"] [hu_CS] = mappings["hu"] [hu_cs]
+entries ["hu"] [hu_DZ] = entries ["hu"] [hu_dz]
+mappings["hu"] [hu_DZ] = mappings["hu"] [hu_dz]
+entries ["hu"][hu_DZS] = entries ["hu"][hu_dzs]
+mappings["hu"][hu_DZS] = mappings["hu"][hu_dzs]
+entries ["hu"] [hu_GY] = entries ["hu"] [hu_gy]
+mappings["hu"] [hu_GY] = mappings["hu"] [hu_gy]
+entries ["hu"] [hu_LY] = entries ["hu"] [hu_ly]
+mappings["hu"] [hu_LY] = mappings["hu"] [hu_ly]
+entries ["hu"] [hu_NY] = entries ["hu"] [hu_ny]
+mappings["hu"] [hu_NY] = mappings["hu"] [hu_ny]
+entries ["hu"] [hu_SZ] = entries ["hu"] [hu_sz]
+mappings["hu"] [hu_SZ] = mappings["hu"] [hu_sz]
+entries ["hu"] [hu_TY] = entries ["hu"] [hu_ty]
+mappings["hu"] [hu_TY] = mappings["hu"] [hu_ty]
+entries ["hu"] [hu_ZS] = entries ["hu"] [hu_zs]
+mappings["hu"] [hu_ZS] = mappings["hu"] [hu_zs]
+
+--- Estonian
+
+replacements["et"] = { --[[ None, do you miss any? ]] }
+
+entries["et"] = {
+ ["a"] = "a",
+ ["b"] = "b",
+ ["d"] = "d",
+ ["e"] = "e",
+ ["f"] = "f",
+ ["g"] = "g",
+ ["h"] = "h",
+ ["i"] = "i",
+ ["j"] = "j",
+ ["k"] = "k",
+ ["l"] = "l",
+ ["m"] = "m",
+ ["n"] = "n",
+ ["o"] = "o",
+ ["p"] = "p",
+ ["r"] = "r",
+ ["s"] = "s",
+ ["š"] = "š",
+ ["z"] = "z",
+ ["ž"] = "ž",
+ ["t"] = "t",
+ ["u"] = "u",
+ ["v"] = "v",
+ ["w"] = "v", -- foreign words only
+ ["õ"] = "õ",
+ ["ä"] = "ä",
+ ["ö"] = "ö",
+ ["ü"] = "ü",
+ ["x"] = "x", --foreign words only
+ ["y"] = "y", --foreign words only
+}
+
+mappings["et"] = {
+ ["a"] = 1,
+ ["b"] = 2,
+ ["d"] = 3,
+ ["e"] = 4,
+ ["f"] = 5,
+ ["g"] = 6,
+ ["h"] = 7,
+ ["i"] = 8,
+ ["j"] = 9,
+ ["k"] = 10,
+ ["l"] = 11,
+ ["m"] = 12,
+ ["n"] = 13,
+ ["o"] = 14,
+ ["p"] = 15,
+ ["r"] = 16,
+ ["s"] = 17,
+ ["š"] = 18,
+ ["z"] = 19,
+ ["ž"] = 20,
+ ["t"] = 21,
+ ["u"] = 22,
+ ["v"] = 23,
+ ["w"] = 23,
+ ["õ"] = 24,
+ ["ä"] = 25,
+ ["ö"] = 26,
+ ["ü"] = 27,
+ ["x"] = 28,
+ ["y"] = 29,
+}
+
+adduppercaseentries ("et")
+adduppercasemappings("et")
diff --git a/tex/context/base/spac-ali.mkiv b/tex/context/base/spac-ali.mkiv
index b168a02be..a9a84e82a 100644
--- a/tex/context/base/spac-ali.mkiv
+++ b/tex/context/base/spac-ali.mkiv
@@ -504,6 +504,7 @@
\def\dosetupalign[#1]% can be made faster by checking for defined #1
{\!!doneafalse
\!!donebfalse
+ \chardef\raggedstatus\zerocount
\processcommacommand[#1]\dodosetupngila
\processcommacommand[#1]\dodosetupalign}
diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua
index 6ca229d2a..f5573b0bd 100644
--- a/tex/context/base/spac-ver.lua
+++ b/tex/context/base/spac-ver.lua
@@ -26,6 +26,9 @@ local texsprint, texlists, texdimen, texbox = tex.sprint, tex.lists, tex.dimen,
local lpegmatch = lpeg.match
local unpack = unpack or table.unpack
local points = number.points
+local allocate = utilities.storage.allocate
+
+local P, C, R, S, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc
local nodes, node, trackers, attributes = nodes, node, trackers, attributes
@@ -44,64 +47,63 @@ local trace_vspacing = false trackers.register("builders.vspacing",
local trace_vsnapping = false trackers.register("builders.vsnapping", function(v) trace_vsnapping = v end)
local trace_vpacking = false trackers.register("builders.vpacking", function(v) trace_vpacking = v end)
-local report_vspacing = logs.new("vspacing")
-local report_collapser = logs.new("collapser")
-local report_snapper = logs.new("snapper")
-
-local skip_category = attributes.private('skip-category')
-local skip_penalty = attributes.private('skip-penalty')
-local skip_order = attributes.private('skip-order')
-local snap_category = attributes.private('snap-category')
-local display_math = attributes.private('display-math')
-local snap_method = attributes.private('snap-method')
-local snap_vbox = attributes.private('snap-vbox')
-
-local has_attribute = node.has_attribute
-local unset_attribute = node.unset_attribute
-local set_attribute = node.set_attribute
-local find_node_tail = node.tail
-local free_node = node.free
-local copy_node = node.copy
-local traverse_nodes = node.traverse
-local traverse_nodes_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove
-local count_nodes = nodes.count
-local node_ids_to_string = nodes.ids_to_string
-local hpack_node = node.hpack
-local vpack_node = node.vpack
-local writable_spec = nodes.writable_spec
-local listtoutf = nodes.listtoutf
-
-local nodepool = nodes.pool
-
-local new_penalty = nodepool.penalty
-local new_kern = nodepool.kern
-local new_rule = nodepool.rule
-
-local nodecodes = nodes.nodecodes
-local skipcodes = nodes.skipcodes
-local fillcodes = nodes.fillcodes
-
-local penalty_code = nodecodes.penalty
-local kern_code = nodecodes.kern
-local glue_code = nodecodes.glue
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local whatsit_code = nodecodes.whatsit
-
-local userskip_code = skipcodes.userskip
-
-builders.vspacing = builders.vspacing or { }
-local vspacing = builders.vspacing
-vspacing.data = vspacing.data or { }
+local report_vspacing = logs.new("vspacing")
+local report_collapser = logs.new("collapser")
+local report_snapper = logs.new("snapper")
+
+local skip_category = attributes.private('skip-category')
+local skip_penalty = attributes.private('skip-penalty')
+local skip_order = attributes.private('skip-order')
+local snap_category = attributes.private('snap-category')
+local display_math = attributes.private('display-math')
+local snap_method = attributes.private('snap-method')
+local snap_vbox = attributes.private('snap-vbox')
+
+local has_attribute = node.has_attribute
+local unset_attribute = node.unset_attribute
+local set_attribute = node.set_attribute
+local find_node_tail = node.tail
+local free_node = node.free
+local copy_node = node.copy
+local traverse_nodes = node.traverse
+local traverse_nodes_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local remove_node = nodes.remove
+local count_nodes = nodes.count
+local nodeidstostring = nodes.idstostring
+local hpack_node = node.hpack
+local vpack_node = node.vpack
+local writable_spec = nodes.writable_spec
+local listtoutf = nodes.listtoutf
+
+local nodepool = nodes.pool
+
+local new_penalty = nodepool.penalty
+local new_kern = nodepool.kern
+local new_rule = nodepool.rule
+
+local nodecodes = nodes.nodecodes
+local skipcodes = nodes.skipcodes
+local fillcodes = nodes.fillcodes
+
+local penalty_code = nodecodes.penalty
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local whatsit_code = nodecodes.whatsit
+
+local userskip_code = skipcodes.userskip
+
+builders.vspacing = builders.vspacing or { }
+local vspacing = builders.vspacing
+vspacing.data = vspacing.data or { }
vspacing.data.snapmethods = vspacing.data.snapmethods or { }
+local snapmethods = vspacing.data.snapmethods --maybe some older code can go
-storage.register("builders/vspacing/data/snapmethods", vspacing.data.snapmethods, "builders.vspacing.data.snapmethods")
-
-local snapmethods = vspacing.data.snapmethods --maybe some older code can go
+storage.register("builders/vspacing/data/snapmethods", snapmethods, "builders.vspacing.data.snapmethods")
local default = {
maxheight = true,
@@ -483,7 +485,7 @@ local function snap_topskip(current,method)
return w, wd
end
-vspacing.categories = {
+local categories = allocate {
[0] = 'discard',
[1] = 'largest',
[2] = 'force' ,
@@ -495,7 +497,7 @@ vspacing.categories = {
[8] = 'together'
}
-local categories = vspacing.categories
+vspacing.categories = categories
function vspacing.tocategories(str)
local t = { }
@@ -518,8 +520,8 @@ function vspacing.tocategory(str)
end
end
-vspacing.data.map = vspacing.data.map or { }
-vspacing.data.skip = vspacing.data.skip or { }
+vspacing.data.map = vspacing.data.map or { } -- allocate ?
+vspacing.data.skip = vspacing.data.skip or { } -- allocate ?
storage.register("builders/vspacing/data/map", vspacing.data.map, "builders.vspacing.data.map")
storage.register("builders/vspacing/data/skip", vspacing.data.skip, "builders.vspacing.data.skip")
@@ -536,17 +538,17 @@ do -- todo: interface.variables
local map = vspacing.data.map
local skip = vspacing.data.skip
- local multiplier = lpeg.C(lpeg.S("+-")^0 * lpeg.R("09")^1) * lpeg.P("*")
- local category = lpeg.P(":") * lpeg.C(lpeg.P(1)^1)
- local keyword = lpeg.C((1-category)^1)
- local splitter = (multiplier + lpeg.Cc(1)) * keyword * (category + lpeg.Cc(false))
+ local multiplier = C(S("+-")^0 * R("09")^1) * P("*")
+ local category = P(":") * C(P(1)^1)
+ local keyword = C((1-category)^1)
+ local splitter = (multiplier + Cc(1)) * keyword * (category + Cc(false))
local k_fixed, k_flexible, k_category, k_penalty, k_order = variables.fixed, variables.flexible, "category", "penalty", "order"
-- This will change: just node.write and we can store the values in skips which
-- then obeys grouping
- local function analyse(str,oldcategory,texsprint) -- we could use shorter names
+ local function analyze(str,oldcategory,texsprint) -- we could use shorter names
for s in gmatch(str,"([^ ,]+)") do
local amount, keyword, detail = lpegmatch(splitter,s)
if not keyword then
@@ -554,7 +556,7 @@ do -- todo: interface.variables
else
local mk = map[keyword]
if mk then
- category = analyse(mk,category,texsprint)
+ category = analyze(mk,category,texsprint)
elseif keyword == k_fixed then
texsprint(ctxcatcodes,"\\fixedblankskip")
elseif keyword == k_flexible then
@@ -592,10 +594,10 @@ do -- todo: interface.variables
return category
end
- function vspacing.analyse(str)
+ function vspacing.analyze(str)
local texsprint = (trace_vspacing and logger) or texsprint
texsprint(ctxcatcodes,"\\startblankhandling")
- analyse(str,1,texsprint)
+ analyze(str,1,texsprint)
texsprint(ctxcatcodes,"\\stopblankhandling")
end
@@ -1170,10 +1172,10 @@ end
local stackhead, stacktail, stackhack = nil, nil, false
local function report(message,lst)
- report_vspacing(message,count_nodes(lst,true),node_ids_to_string(lst))
+ report_vspacing(message,count_nodes(lst,true),nodeidstostring(lst))
end
-function nodes.handlers.pagespacing(newhead,where)
+function vspacing.pagehandler(newhead,where)
--~ local newhead = texlists.contrib_head
if newhead then
--~ starttiming(vspacing)
@@ -1237,7 +1239,7 @@ local ignore = table.tohash {
-- "vbox",
}
-function nodes.handlers.vboxspacing(head,where)
+function vspacing.vboxhandler(head,where)
if head and not ignore[where] and head.next then
-- starttiming(vspacing)
head = collapser(head,"vbox",where,trace_vbox_vspacing,true,snap_vbox) -- todo: local snapper
@@ -1246,7 +1248,7 @@ function nodes.handlers.vboxspacing(head,where)
return head
end
-function nodes.collapsevbox(n) -- for boxes but using global snap_method
+function vspacing.collapsevbox(n) -- for boxes but using global snap_method
local list = texbox[n].list
if list then
-- starttiming(vspacing)
@@ -1258,12 +1260,12 @@ end
-- We will split this module so a few locals are repeated. Also this will be
-- rewritten.
-local attribute = attributes.private('graphicvadjust')
+local attribute = attributes.private('graphicvadjust')
-local nodecodes = nodes.nodecodes
+local nodecodes = nodes.nodecodes
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
local remove_node = nodes.remove
local hpack_node = node.hpack
@@ -1361,7 +1363,7 @@ function nodes.builders.buildpage_filter(groupcode)
return (done and head) or true
end
-callbacks.register('vpack_filter', nodes.builders.vpack_filter, "vertical spacing etc")
+callbacks.register('vpack_filter', nodes.builders.vpack_filter, "vertical spacing etc")
callbacks.register('buildpage_filter', nodes.builders.buildpage_filter, "vertical spacing etc (mvl)")
statistics.register("v-node processing time", function()
diff --git a/tex/context/base/spac-ver.mkiv b/tex/context/base/spac-ver.mkiv
index db80c6e36..d544d237b 100644
--- a/tex/context/base/spac-ver.mkiv
+++ b/tex/context/base/spac-ver.mkiv
@@ -1501,7 +1501,7 @@
\def\dododosnaptogrid#1%
{\ifvbox\nextbox % this will go away
- \ctxlua{nodes.collapsevbox(\number\nextbox)}% isn't that already done?
+ \ctxlua{builders.vspacing.collapsevbox(\number\nextbox)}% isn't that already done?
\fi
\doifelsenothing{#1}{\setsnapvalue\v!normal}{\autosetsnapvalue{#1}}%
\ctxlua{builders.vspacing.snapbox(\number\nextbox,\number\attribute\snapmethodattribute)}%
@@ -1609,19 +1609,6 @@
% \start \dosetstretch{.25em} \setuptolerance[tolerant,stretch] \input tufte \endgraf \stop
% \start \dosetstretch{.5em} effe flink doorfietsen \stop
-% \def\dosetupgridsnapping % calls too often, only needed in gridsnapping
-% {\ctxlua{nodes.setsnapvalue(1,\number\openstrutheight,\number\openstrutdepth)}}
-%
-% \def\doenablegridsnapping
-% {\attribute\snapcategoryattribute\plusone
-% \topskip\strutht
-% \offinterlineskip}
-%
-% \def\dodisablegridsnapping
-% {\attribute\snapcategoryattribute\attributeunsetvalue
-% % reset topskip
-% \oninterlineskip}
-
% experimental code, not yet interfaced:
% category:
@@ -1776,8 +1763,8 @@
\fi\fi}
\def\dovspacingyes[#1]%
- %{\ifmmode\else\ctxlua{builders.vspacing.analyse("\iffirstargument#1\else\s!default\fi")}\fi}
- {\ifmmode\else\ctxlua{builders.vspacing.analyse("\iffirstargument#1\else\currentvspacing\fi")}\fi}
+ %{\ifmmode\else\ctxlua{builders.vspacing.analyze("\iffirstargument#1\else\s!default\fi")}\fi}
+ {\ifmmode\else\ctxlua{builders.vspacing.analyze("\iffirstargument#1\else\currentvspacing\fi")}\fi}
\def\dovspacingnop[#1]%
{\ifmmode\else\par\fi}
diff --git a/tex/context/base/strc-bkm.lua b/tex/context/base/strc-bkm.lua
index 77d616260..f9ce93eef 100644
--- a/tex/context/base/strc-bkm.lua
+++ b/tex/context/base/strc-bkm.lua
@@ -104,7 +104,7 @@ end
function bookmarks.place()
if next(names) then
- local list = lists.filter_collected(names,"all",nil,lists.collected,forced)
+ local list = lists.filtercollected(names,"all",nil,lists.collected,forced)
if #list > 0 then
local levels, lastlevel = { }, 1
for i=1,#list do
diff --git a/tex/context/base/strc-blk.lua b/tex/context/base/strc-blk.lua
index 5d6d90da9..b3a36306e 100644
--- a/tex/context/base/strc-blk.lua
+++ b/tex/context/base/strc-blk.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['strc--blk'] = {
+if not modules then modules = { } end modules ['strc-blk'] = {
version = 1.001,
comment = "companion to strc-blk.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -11,6 +11,7 @@ if not modules then modules = { } end modules ['strc--blk'] = {
local texprint, format, gmatch, find = tex.print, string.format, string.gmatch, string.find
local lpegmatch = lpeg.match
local settings_to_set, settings_to_array = utilities.parsers.settings_to_set, utilities.parsers.settings_to_array
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local ctxcatcodes = tex.ctxcatcodes
@@ -22,17 +23,18 @@ local blocks = structures.blocks
local sections = structures.sections
local lists = structures.lists
-blocks.collected = blocks.collected or { }
-blocks.tobesaved = blocks.tobesaved or { }
-blocks.states = blocks.states or { }
+local collected, tobesaved, states = allocate(), allocate(), allocate()
-local tobesaved, collected, states = blocks.tobesaved, blocks.collected, blocks.states
+blocks.collected = collected
+blocks.tobesaved = tobesaved
+blocks.states = states
local function initializer()
- collected, tobesaved = blocks.collected, blocks.tobesaved
+ collected = mark(blocks.collected)
+ tobesaved = mark(blocks.tobesaved)
end
-job.register('structures.blocks.collected', blocks.tobesaved, initializer)
+job.register('structures.blocks.collected', tobesaved, initializer)
local printer = (lpeg.patterns.textline/texprint)^0 -- can be shared
@@ -85,7 +87,7 @@ function blocks.select(state,name,tag,criterium)
local tags = not all and settings_to_set(tag)
local hide = state == "process"
local n = sections.numberatdepth(criterium)
- local result = lists.filter_collected("all", criterium, n, collected, { })
+ local result = lists.filtercollected("all", criterium, n, collected, { })
for i=1,#result do
local ri = result[i]
local metadata = ri.metadata
diff --git a/tex/context/base/strc-def.mkiv b/tex/context/base/strc-def.mkiv
index 94bc2fb14..190764424 100644
--- a/tex/context/base/strc-def.mkiv
+++ b/tex/context/base/strc-def.mkiv
@@ -109,21 +109,30 @@
% \part{second} \chapter{gamma} \section{a} \section{b} \subsection{x} \subsection{y} \chapter{delta}
% \stoptext
-\definestructureseparatorset [\s!default] [] [.]
-\definestructureconversionset [\s!default] [] [numbers]
-\definestructureresetset [\s!default] [] [1] % each level
-\definestructureprefixset [\s!default] [section-1,section-2,section-3] []
+\definestructureseparatorset [\s!default] [] [.]
+\definestructureconversionset [\s!default] [] [numbers]
+\definestructureresetset [\s!default] [] [1] % each level
+\definestructureprefixset [\s!default] [section-1,section-2,section-3] []
-\definestructureprefixset [\v!all] [section-1,section-2,section-3,section-4,section-5,section-6,section-7,section-8] []
-\definestructureprefixset [\v!none] [] []
+\definestructureconversionset [\v!pagenumber] [] [numbers]
-\definestructureprefixset [\v!part] [section-1] []
-\definestructureprefixset [\v!chapter] [section-2] []
+\definestructureprefixset [\v!all] [section-1,section-2,section-3,section-4,section-5,section-6,section-7,section-8] []
+\definestructureprefixset [\v!none] [] []
+
+\definestructureprefixset [\v!part] [section-1] []
+\definestructureprefixset [\v!chapter] [section-2] []
\definestructureseparatorset [\v!appendix:\s!default] [] [.]
\definestructureconversionset [\v!appendix:\s!default] [Romannumerals,Characters] [numbers]
\definestructureresetset [\v!appendix:\s!default] [] [1] % why was this 0
+\setupuserpagenumber [\c!numberconversionset=\v!pagenumber]
+
+% \startsetups defaults:frontpart:pagenumbers:roman
+% \definestructureconversionset[\c!frontpart:\c!pagenumber][][romannumerals]
+% \setupuserpagenumber[\c!way=\v!byblock]
+% \stopsetups
+
% \definesectionblock
\definestructureblock [\v!frontpart] [\v!frontmatter] [\c!number=\v!no]
diff --git a/tex/context/base/strc-des.mkiv b/tex/context/base/strc-des.mkiv
index 0a5f197de..b0a240612 100644
--- a/tex/context/base/strc-des.mkiv
+++ b/tex/context/base/strc-des.mkiv
@@ -415,6 +415,8 @@
\BeforePar{\@@makedescription[#1]{#2}}%
\GotoPar}
+\let\dotagsetdescriptiontag\relax
+
\def\@@dostartdescriptionindeed
{\edef\currentdescriptionlocation{\descriptionparameter\c!location}%
\ifx\currentdescriptionlocation\empty
@@ -425,6 +427,7 @@
\fi
\dostarttagged\t!description\currentdescription
\dostarttagged\t!descriptiontag\empty
+ \dotagsetdescriptiontag
\@@dostartdescription
\csname @@description\currentdescriptionlocation\endcsname
\dostoptagged
diff --git a/tex/context/base/strc-doc.lua b/tex/context/base/strc-doc.lua
index 660dd61f3..d1048972b 100644
--- a/tex/context/base/strc-doc.lua
+++ b/tex/context/base/strc-doc.lua
@@ -15,6 +15,7 @@ local format, gsub, find, concat, gmatch, match = string.format, string.gsub, st
local texsprint, texwrite = tex.sprint, tex.write
local concat = table.concat
local max, min = math.max, math.min
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local ctxcatcodes = tex.ctxcatcodes
local variables = interfaces.variables
@@ -34,6 +35,7 @@ local sections = structures.sections
local lists = structures.lists
local counters = structures.counters
local sets = structures.sets
+local tags = structures.tags
local processors = structures.processors
local sprintprocessor = processors.sprint
@@ -70,16 +72,18 @@ documents.initialize()
-- -- -- sections -- -- --
-sections.collected = sections.collected or { }
-sections.tobesaved = sections.tobesaved or { }
-local collected, tobesaved = sections.collected, sections.tobesaved
+local collected, tobesaved = allocate(), allocate()
+
+sections.collected = collected
+sections.tobesaved = tobesaved
--~ local function initializer()
---~ collected, tobesaved = sections.collected, sections.tobesaved
+--~ collected = mark(sections.collected)
+--~ tobesaved = mark(sections.tobesaved)
--~ end
---~ job.register('structures.sections.collected', sections.tobesaved, initializer)
+--~ job.register('structures.sections.collected', tobesaved, initializer)
function sections.currentid()
return #tobesaved
@@ -311,6 +315,9 @@ function sections.somelevel(given)
if trace_detail then
report_structure("name '%s', numbers '%s', own numbers '%s'",givenname,concat(numberdata.numbers, " "),concat(numberdata.ownnumbers, " "))
end
+
+ given.references.tag = tags.last and tags.last("section") -- (metadata.kind) sort of forward usage (section -> structure)
+
given.references.section = sections.save(given)
-- given.numberdata = nil
end
@@ -472,48 +479,51 @@ end
--~ todo: test this
--~
---~ local function process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,preceding,done) -- todo: result
---~ -- todo: too much (100 steps)
---~ local number = numbers and (numbers[index] or 0)
---~ local ownnumber = ownnumbers and ownnumbers[index] or ""
---~ if number > criterium or (ownnumber ~= "") then
---~ local block = (entry.block ~= "" and entry.block) or sections.currentblock() -- added
---~ if preceding then
---~ local separator = sets.get("structure:separators",block,separatorset,preceding,".")
---~ if result then
---~ result[#result+1] = ignoreprocessor(separator)
---~ else
---~ sprintprocessor(ctxcatcodes,separator)
---~ end
---~ preceding = false
---~ end
---~ if result then
---~ if ownnumber ~= "" then
---~ result[#result+1] = ownnumber
---~ elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups)
---~ result[#result+1] = converters.convert(conversion,number,true)
---~ else
---~ local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
---~ result[#result+1] = converters.convert(theconversion,number,true)
---~ end
---~ else
---~ if ownnumber ~= "" then
---~ sprintprocessor(ctxcatcodes,ownnumber)
---~ elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups)
---~ texsprint(ctxcatcodes,format("\\convertnumber{%s}{%s}",conversion,number))
---~ -- context.convertnumber(conversion,number)
---~ else
---~ local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
---~ sprintprocessor(ctxcatcodes,theconversion,function(str)
---~ return format("\\convertnumber{%s}{%s}",str or "numbers",number)
---~ end)
---~ end
---~ end
---~ return index, true -- preceding, done
---~ else
---~ return preceding or false, done
---~ end
---~ end
+
+local function process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done)
+ -- todo: too much (100 steps)
+ local number = numbers and (numbers[index] or 0)
+ local ownnumber = ownnumbers and ownnumbers[index] or ""
+ if number > criterium or (ownnumber ~= "") then
+ local block = (entry.block ~= "" and entry.block) or sections.currentblock() -- added
+ if preceding then
+ local separator = sets.get("structure:separators",block,separatorset,preceding,".")
+ if separator then
+ if result then
+ result[#result+1] = ignoreprocessor(separator)
+ else
+ sprintprocessor(ctxcatcodes,separator)
+ end
+ end
+ preceding = false
+ end
+ if result then
+ if ownnumber ~= "" then
+ result[#result+1] = ownnumber
+ elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups) .. inherited!
+ result[#result+1] = converters.convert(conversion,number,true)
+ else
+ local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
+ result[#result+1] = converters.convert(theconversion,number,true)
+ end
+ else
+ if ownnumber ~= "" then
+ sprintprocessor(ctxcatcodes,ownnumber)
+ elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups)
+ texsprint(ctxcatcodes,format("\\convertnumber{%s}{%s}",conversion,number))
+ -- context.convertnumber(conversion,number)
+ else
+ local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
+ sprintprocessor(ctxcatcodes,theconversion,function(str)
+ return format("\\convertnumber{%s}{%s}",str or "numbers",number)
+ end)
+ end
+ end
+ return index, true
+ else
+ return preceding or false, done
+ end
+end
function sections.typesetnumber(entry,kind,...) -- kind='section','number','prefix'
if entry and entry.hidenumber ~= true then -- can be nil
@@ -579,50 +589,6 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
local numbers, ownnumbers = entry.numbers, entry.ownnumbers
if numbers then
local done, preceding = false, false
- local function process(index,result) -- move to outer
- -- todo: too much (100 steps)
- local number = numbers and (numbers[index] or 0)
- local ownnumber = ownnumbers and ownnumbers[index] or ""
- if number > criterium or (ownnumber ~= "") then
- local block = (entry.block ~= "" and entry.block) or sections.currentblock() -- added
- if preceding then
- local separator = sets.get("structure:separators",block,separatorset,preceding,".")
- if separator then
- if result then
- result[#result+1] = ignoreprocessor(separator)
- else
- sprintprocessor(ctxcatcodes,separator)
- end
- end
- preceding = false
- end
- if result then
- if ownnumber ~= "" then
- result[#result+1] = ownnumber
- elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups)
- result[#result+1] = converters.convert(conversion,number,true)
- else
- local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
- result[#result+1] = converters.convert(theconversion,number,true)
- end
- else
- if ownnumber ~= "" then
- sprintprocessor(ctxcatcodes,ownnumber)
- elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups)
- texsprint(ctxcatcodes,format("\\convertnumber{%s}{%s}",conversion,number))
- --~ context.convertnumber(conversion,number)
- else
- local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
- sprintprocessor(ctxcatcodes,theconversion,function(str)
- return format("\\convertnumber{%s}{%s}",str or "numbers",number)
- end)
- end
- end
- preceding, done = index, true
- else
- preceding = preceding or false
- end
- end
--
local result = kind == "direct" and { }
if result then
@@ -639,65 +605,57 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
end
if prefixlist and (kind == 'section' or kind == 'prefix' or kind == 'direct') then
-- find valid set (problem: for sectionnumber we should pass the level)
- -- if kind == "section" then
- -- no holes
- local b, e, bb, ee = 1, #prefixlist, 0, 0
- -- find last valid number
- for k=e,b,-1 do
- local prefix = prefixlist[k]
- local index = sections.getlevel(prefix) or k
- if index >= firstprefix and index <= lastprefix then
- local number = numbers and numbers[index]
- if number then
- local ownnumber = ownnumbers and ownnumbers[index] or ""
- if number > 0 or (ownnumber ~= "") then
- break
- else
- e = k -1
- end
+ -- no holes
+ local b, e, bb, ee = 1, #prefixlist, 0, 0
+ -- find last valid number
+ for k=e,b,-1 do
+ local prefix = prefixlist[k]
+ local index = sections.getlevel(prefix) or k
+ if index >= firstprefix and index <= lastprefix then
+ local number = numbers and numbers[index]
+ if number then
+ local ownnumber = ownnumbers and ownnumbers[index] or ""
+ if number > 0 or (ownnumber ~= "") then
+ break
+ else
+ e = k -1
end
end
end
- -- find valid range
- for k=b,e do
- local prefix = prefixlist[k]
- local index = sections.getlevel(prefix) or k
- if index >= firstprefix and index <= lastprefix then
- local number = numbers and numbers[index]
- if number then
- local ownnumber = ownnumbers and ownnumbers[index] or ""
- if number > 0 or (ownnumber ~= "") then
- if bb == 0 then bb = k end
- ee = k
- else
- bb, ee = 0, 0
- end
+ end
+ -- find valid range
+ for k=b,e do
+ local prefix = prefixlist[k]
+ local index = sections.getlevel(prefix) or k
+ if index >= firstprefix and index <= lastprefix then
+ local number = numbers and numbers[index]
+ if number then
+ local ownnumber = ownnumbers and ownnumbers[index] or ""
+ if number > 0 or (ownnumber ~= "") then
+ if bb == 0 then bb = k end
+ ee = k
else
- break
+ bb, ee = 0, 0
end
+ else
+ break
end
end
- -- print valid range
- for k=bb,ee do
- local prefix = prefixlist[k]
- local index = sections.getlevel(prefix) or k
- if index >= firstprefix and index <= lastprefix then
- process(index,result)
- end
+ end
+ -- print valid range
+ for k=bb,ee do
+ local prefix = prefixlist[k]
+ local index = sections.getlevel(prefix) or k
+ if index >= firstprefix and index <= lastprefix then
+ -- process(index,result)
+ preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done)
end
- -- else
- -- for k=1,#prefixlist do
- -- local prefix = prefixlist[k]
- -- local index = sections.getlevel(prefix) or k
- -- if index >= firstprefix and index <= lastprefix then
- -- process(index)
- -- end
- -- end
- -- end
+ end
else
-- also holes check
- for prefix=firstprefix,lastprefix do
- process(prefix,result)
+ for index=firstprefix,lastprefix do
+ -- process(index,result)
+ preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done)
end
end
--
@@ -777,3 +735,49 @@ function sections.getnumber(depth,what) -- redefined here
local sectiondata = sections.findnumber(depth,what)
texwrite((sectiondata and sectiondata.numbers[depth]) or 0)
end
+
+--~ local done, preceding = false, false
+--~ local function process(index,result) -- move to outer
+--~ -- todo: too much (100 steps)
+--~ local number = numbers and (numbers[index] or 0)
+--~ local ownnumber = ownnumbers and ownnumbers[index] or ""
+--~ if number > criterium or (ownnumber ~= "") then
+--~ local block = (entry.block ~= "" and entry.block) or sections.currentblock() -- added
+--~ if preceding then
+--~ local separator = sets.get("structure:separators",block,separatorset,preceding,".")
+--~ if separator then
+--~ if result then
+--~ result[#result+1] = ignoreprocessor(separator)
+--~ else
+--~ sprintprocessor(ctxcatcodes,separator)
+--~ end
+--~ end
+--~ preceding = false
+--~ end
+--~ if result then
+--~ if ownnumber ~= "" then
+--~ result[#result+1] = ownnumber
+--~ elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups)
+--~ result[#result+1] = converters.convert(conversion,number,true)
+--~ else
+--~ local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
+--~ result[#result+1] = converters.convert(theconversion,number,true)
+--~ end
+--~ else
+--~ if ownnumber ~= "" then
+--~ sprintprocessor(ctxcatcodes,ownnumber)
+--~ elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups)
+--~ texsprint(ctxcatcodes,format("\\convertnumber{%s}{%s}",conversion,number))
+--~ --~ context.convertnumber(conversion,number)
+--~ else
+--~ local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
+--~ sprintprocessor(ctxcatcodes,theconversion,function(str)
+--~ return format("\\convertnumber{%s}{%s}",str or "numbers",number)
+--~ end)
+--~ end
+--~ end
+--~ preceding, done = index, true
+--~ else
+--~ preceding = preceding or false
+--~ end
+--~ end
diff --git a/tex/context/base/strc-flt.mkiv b/tex/context/base/strc-flt.mkiv
index fe9ddfea3..e50306ead 100644
--- a/tex/context/base/strc-flt.mkiv
+++ b/tex/context/base/strc-flt.mkiv
@@ -475,7 +475,7 @@
{\settrialtypesetting
\notesenabledfalse
\putcompletecaption{#1}{#2}}%
- % new, \placefigure{\XMLflush{somecaption}}{} passes earlier empty check
+ % new, \placefigure{\xmlfirst{#1}{somecaption}}{} passes earlier empty check
% so here we misuse the scratch box; actually this means that the previous
% test can go away (some day, when i redo this module)
\ifdim\wd\tempcaptionbox=\zeropoint
diff --git a/tex/context/base/strc-ini.lua b/tex/context/base/strc-ini.lua
index 1810369a5..164b60f33 100644
--- a/tex/context/base/strc-ini.lua
+++ b/tex/context/base/strc-ini.lua
@@ -25,6 +25,7 @@ local count, texwrite, texprint, texsprint = tex.count, tex.write, tex.print, te
local type, next, tonumber, tostring = type, next, tonumber, tostring
local lpegmatch = lpeg.match
local settings_to_array, settings_to_hash = utilities.parsers.settings_to_array, utilities.parsers.settings_to_hash
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local ctxcatcodes, xmlcatcodes, notcatcodes = tex.ctxcatcodes, tex.xmlcatcodes, tex.notcatcodes -- tricky as we're in notcatcodes
@@ -69,6 +70,7 @@ structures.formulas = structures.formulas or { }
structures.sets = structures.sets or { }
structures.marks = structures.marks or { }
structures.floats = structures.floats or { }
+structures.synonyms = structures.synonyms or { }
--~ table.print(structures)
@@ -79,17 +81,18 @@ structures.floats = structures.floats or { }
local specials = structures.specials
-specials.collected = specials.collected or { }
-specials.tobesaved = specials.collected or { }
+local collected, tobesaved = allocate(), allocate()
-local collected, tobesaved = specials.collected, specials.tobesaved
+specials.collected = collected
+specials.tobesaved = tobesaved
local function initializer()
- collected, tobesaved = specials.collected, specials.tobesaved
+ collected = mark(specials.collected)
+ tobesaved = mark(specials.tobesaved)
end
if job then
- job.register('structures.specials.collected', specials.tobesaved, initializer)
+ job.register('structures.specials.collected', tobesaved, initializer)
end
function specials.store(class,data)
@@ -168,21 +171,43 @@ local tags = {
entry = "ctx:registerentry",
}
-function helpers.title(title,metadata) -- brrr
- if title and title ~= "" then
+-- We had the following but it overloads the main document so it's a no-go as we
+-- no longer push and pop. So now we use the tag as buffername, namespace and also
+-- (optionally) as a setups to be applied but keep in mind that document setups
+-- also get applied (when they use #1's).
+--
+-- local command = format("\\xmlprocessbuffer{%s}{%s}{}",metadata.xmlroot or "main",tag)
+
+function helpers.title(title,metadata) -- coding is xml is rather old and not that much needed now
+ if title and title ~= "" then -- so it might disappear
if metadata then
- if metadata.coding == "xml" then
- tag = tags[metadata.kind] or tags.generic
- buffers.set(tag,format("<?xml version='1.0'?><%s>%s</%s>",tag,title,tag))
- texsprint(ctxcatcodes,format("\\xmlprocessbuffer{%s}{%s}{}",metadata.xmlroot or "main",tag))
- elseif metadata.xmlsetup then
- texsprint(ctxcatcodes,format("\\xmlsetup{%s}{%s}",title,metadata.xmlsetup)) -- nasty
+ if metadata.coding == "xml" then -- title can contain raw xml
+ local tag = tags[metadata.kind] or tags.generic
+ local xmldata = format("<?xml version='1.0'?><%s>%s</%s>",tag,title,tag)
+ local command = format("\\xmlprocessbuffer{%s}{%s}{%s}","dummy",tag,metadata.xmlsetup or "")
+ buffers.set(tag,xmldata)
+ if trace_processors then
+ report_processors("xmldata: %s",xmldata)
+ report_processors("feeding: %s",command)
+ end
+ texsprint(ctxcatcodes,command)
+ elseif metadata.xmlsetup then -- title is reference to node (so \xmlraw should have been used)
+ local command = format("\\xmlsetup{%s}{%s}",title,metadata.xmlsetup)
+ if trace_processors then
+ report_processors("feeding: %s",command)
+ end
+ texsprint(ctxcatcodes,command)
else
local catcodes = metadata.catcodes
---~ print(tex.ctxcatcodes,tex.xmlcatcodes,catcodes,title)
if catcodes == notcatcodes or catcodes == xmlcatcodes then
+ if trace_processors then
+ report_processors("cct: %s (overloads %s), txt: %s",ctxcatcodes,catcodes,title)
+ end
texsprint(ctxcatcodes,title) -- nasty
else
+ if trace_processors then
+ report_processors("cct: %s, txt: %s",catcodes,title)
+ end
texsprint(catcodes,title)
end
end
diff --git a/tex/context/base/strc-itm.lua b/tex/context/base/strc-itm.lua
index 53e0d56bf..1c7c2adb5 100644
--- a/tex/context/base/strc-itm.lua
+++ b/tex/context/base/strc-itm.lua
@@ -7,11 +7,8 @@ if not modules then modules = { } end modules ['strc-itm'] = {
}
local structures = structures
-
-structures.itemgroups = structures.itemgroups or { }
-local itemgroups = structures.itemgroups
-
-local jobpasses = job.passes
+local itemgroups = structures.itemgroups
+local jobpasses = job.passes
function itemgroups.register(name,nofitems,maxwidth)
jobpasses.savedata("itemgroup", { nofitems, maxwidth })
diff --git a/tex/context/base/strc-itm.mkiv b/tex/context/base/strc-itm.mkiv
index 9bb631e78..8b8b6d243 100644
--- a/tex/context/base/strc-itm.mkiv
+++ b/tex/context/base/strc-itm.mkiv
@@ -311,7 +311,8 @@
\else
\getitemparameter\currentitemlevel\c!before
\fi\fi
- \dostarttagged\t!itemgroup\currentitemgroup}
+ \dostarttagged\t!itemgroup\currentitemgroup
+ \dotagsetitemize}
\def\itemaftercommand
{\dostoptagged
@@ -624,6 +625,8 @@
\let\currentitemindenting\empty
+\let\dotagsetitemize\relax
+
\def\redostartitemgroup[#1][#2]%
{\setfalse\inlinelistitem % new, no indent (leftskip)
\setfalse\concatnextitem % new, concat
diff --git a/tex/context/base/strc-lst.lua b/tex/context/base/strc-lst.lua
index 2cba90fe8..f237b6e42 100644
--- a/tex/context/base/strc-lst.lua
+++ b/tex/context/base/strc-lst.lua
@@ -17,43 +17,47 @@ local texsprint, texprint, texwrite, texcount = tex.sprint, tex.print, tex.write
local concat, insert, remove = table.concat, table.insert, table.remove
local lpegmatch = lpeg.match
local simple_hash_to_string, settings_to_hash = utilities.parsers.simple_hash_to_string, utilities.parsers.settings_to_hash
+local allocate, mark, checked = utilities.storage.allocate, utilities.storage.mark, utilities.storage.checked
local trace_lists = false trackers.register("structures.lists", function(v) trace_lists = v end)
local report_lists = logs.new("lists")
-local ctxcatcodes = tex.ctxcatcodes
-
-local structures = structures
-
-structures.lists = structures.lists or { }
+local ctxcatcodes = tex.ctxcatcodes
+local structures = structures
local lists = structures.lists
local sections = structures.sections
local helpers = structures.helpers
local documents = structures.documents
local pages = structures.pages
+local tags = structures.tags
local references = structures.references
-lists.collected = lists.collected or { }
-lists.tobesaved = lists.tobesaved or { }
+local collected = allocate()
+local tobesaved = allocate()
+local cached = allocate()
+local pushed = allocate()
+
+lists.collected = collected
+lists.tobesaved = tobesaved
+
lists.enhancers = lists.enhancers or { }
-lists.internals = lists.internals or { }
-lists.ordered = lists.ordered or { }
-lists.cached = lists.cached or { }
+lists.internals = allocate(lists.internals or { }) -- to be checked
+lists.ordered = allocate(lists.ordered or { }) -- to be checked
+lists.cached = cached
+lists.pushed = pushed
references.specials = references.specials or { }
-local cached, pushed = lists.cached, { }
-
local variables = interfaces.variables
local matchingtilldepth, numberatdepth = sections.matchingtilldepth, sections.numberatdepth
local function initializer()
-- create a cross reference between internal references
-- and list entries
- local collected = lists.collected
- local internals = references.internals
+ local collected = mark(lists.collected)
+ local internals = checked(references.internals)
local ordered = lists.ordered
for i=1,#collected do
local c = collected[i]
@@ -84,9 +88,7 @@ local function initializer()
end
end
-if job then
- job.register('structures.lists.collected', lists.tobesaved, initializer)
-end
+job.register('structures.lists.collected', tobesaved, initializer)
function lists.push(t)
local r = t.references
@@ -94,6 +96,9 @@ function lists.push(t)
local p = pushed[i]
if not p then
p = #cached + 1
+ if r.tag == nil then
+ r.tag = tags.last and tags.last(t.metadata.kind) -- maybe kind but then also check elsewhere
+ end
cached[p] = helpers.simplify(t)
pushed[i] = p
end
@@ -165,7 +170,7 @@ end
-- will be split
-local function filter_collected(names, criterium, number, collected, forced, nested) -- names is hash or string
+local function filtercollected(names, criterium, number, collected, forced, nested) -- names is hash or string
local numbers, depth = documents.data.numbers, documents.data.depth
local result, detail = { }, nil
criterium = gsub(criterium," ","") -- not needed
@@ -203,7 +208,7 @@ local function filter_collected(names, criterium, number, collected, forced, nes
end
elseif criterium == variables.current then
if depth == 0 then
- return filter_collected(names,variables.intro,number,collected,forced)
+ return filtercollected(names,variables.intro,number,collected,forced)
else
for i=1,#collected do
local v = collected[i]
@@ -235,7 +240,7 @@ local function filter_collected(names, criterium, number, collected, forced, nes
elseif criterium == variables.here then
-- this is quite dirty ... as cnumbers is not sparse we can misuse #cnumbers
if depth == 0 then
- return filter_collected(names,variables.intro,number,collected,forced)
+ return filtercollected(names,variables.intro,number,collected,forced)
else
for i=1,#collected do
local v = collected[i]
@@ -267,7 +272,7 @@ local function filter_collected(names, criterium, number, collected, forced, nes
end
elseif criterium == variables.previous then
if depth == 0 then
- return filter_collected(names,variables.intro,number,collected,forced)
+ return filtercollected(names,variables.intro,number,collected,forced)
else
for i=1,#collected do
local v = collected[i]
@@ -299,11 +304,11 @@ local function filter_collected(names, criterium, number, collected, forced, nes
elseif criterium == variables["local"] then -- not yet ok
local nested = nesting[#nesting]
if nested then
- return filter_collected(names,nested.name,nested.number,collected,forced,nested)
+ return filtercollected(names,nested.name,nested.number,collected,forced,nested)
elseif sections.autodepth(documents.data.numbers) == 0 then
- return filter_collected(names,variables.all,number,collected,forced)
+ return filtercollected(names,variables.all,number,collected,forced)
else
- return filter_collected(names,variables.current,number,collected,forced)
+ return filtercollected(names,variables.current,number,collected,forced)
end
else -- sectionname, number
-- not the same as register
@@ -343,10 +348,10 @@ local function filter_collected(names, criterium, number, collected, forced, nes
return result
end
-lists.filter_collected = filter_collected
+lists.filtercollected = filtercollected
function lists.filter(names, criterium, number, forced)
- return filter_collected(names, criterium, number, lists.collected, forced)
+ return filtercollected(names, criterium, number, lists.collected, forced)
end
lists.result = { }
diff --git a/tex/context/base/strc-lst.mkiv b/tex/context/base/strc-lst.mkiv
index 8578b7157..875cfa799 100644
--- a/tex/context/base/strc-lst.mkiv
+++ b/tex/context/base/strc-lst.mkiv
@@ -152,7 +152,7 @@
\ctxlua{structures.lists.process("#1","#2","#3")}%
\dostoptagged}
-\unexpanded\def\analysestructurelist#1#2#3%
+\unexpanded\def\analyzestructurelist#1#2#3%
{\ctxlua{structures.lists.analyze("#1","#2","#3")}}
\def\firststructureelementinlist#1%
@@ -857,7 +857,7 @@
\dosetuplist[#1][#2]%
\edef\currentlist{\firststructureelementinlist{#1}}%
\the\everystructurelist
- \analysestructurelist{#1}{\listparameter\c!criterium}{\number\listparameter\c!number}%
+ \analyzestructurelist{#1}{\listparameter\c!criterium}{\number\listparameter\c!number}%
\xdef\utilitylistlength{\structurelistsize}%
\endgroup
\dosetlistmode}
diff --git a/tex/context/base/strc-mar.lua b/tex/context/base/strc-mar.lua
index fd97836e6..d2e2b0352 100644
--- a/tex/context/base/strc-mar.lua
+++ b/tex/context/base/strc-mar.lua
@@ -7,8 +7,6 @@ if not modules then modules = { } end modules ['strc-mar'] = {
}
local structures = structures
-
-structures.marks = structures.marks or { }
local marks = structures.marks
local lists = structures.lists
diff --git a/tex/context/base/strc-mat.mkiv b/tex/context/base/strc-mat.mkiv
index 1e9bd8273..ca4d4b2c5 100644
--- a/tex/context/base/strc-mat.mkiv
+++ b/tex/context/base/strc-mat.mkiv
@@ -73,8 +73,9 @@
[\c!name=\v!formula,\s!counter=\v!formula,%
\s!hascaption=\v!yes,\s!hastitle=\v!yes,\s!hasnumber=\v!yes,%\s!haslevel=#6,%
\s!hasnumber=\v!no,%
- \c!reference=#1,\c!title=,\c!bookmark=]%
+ \c!reference=#1,\c!title=\namedformulaentry,\c!bookmark=]%
[#2]%
+ \glet\namedformulaentry\relax
\globallet#3\laststructurecounternumber
\globallet#4\laststructurecountersynchronize
\globallet#5\laststructurecounterattribute}
@@ -160,7 +161,7 @@
\currentplaceformulaattribute
\currentplaceformulasynchronize
\glet\currentplaceformulasynchronize\relax
-\theformuladestinationattribute\currentplaceformulaattribute
+ \theformuladestinationattribute\currentplaceformulaattribute
\fi
\ifnum\formulasnumbermode=\plusthree
\storecurrentformulanumber
@@ -171,7 +172,7 @@
\currentformulasattribute
\currentformulassynchronize
\glet\currentformulassynchronize\relax
-\theformuladestinationattribute\currentformulasattribute
+ \theformuladestinationattribute\currentformulasattribute
\fi
\ifnum\subformulasnumbermode=\plusthree
\currentsubformulassynchronize
@@ -186,7 +187,7 @@
\currentnestedformulaattribute
\currentnestedformulasynchronize
\glet\currentnestedformulasynchronize\relax
-\theformuladestinationattribute\currentnestedformulaattribute
+ \theformuladestinationattribute\currentnestedformulaattribute
\fi}
% needs checking ... too many:
@@ -255,6 +256,15 @@
\definelist[\v!formula]
+\installstructurelistprocessor\v!formula % to be checked ...
+ {\dodolistelement
+ \currentlist
+ \structurelistlocation
+ \structurelistgenericnumber
+ \structurelistgenerictitle % hm, can end up in message (volkers test)
+ \structurelistpagenumber
+ \structurelistrealpagenumber}
+
\appendtoks
\global\setfalse\insideplaceformula
\global\setfalse\insideplacesubformula
@@ -638,7 +648,10 @@
\def\forceformulanumberflag {+}
\def\docheckformulareference#1#2%
- {\chardef#1\ifx#2\empty
+ {\chardef#1%
+ \unless\ifx\namedformulaentry\relax % new 29/8/2010
+ \plusthree
+ \else\ifx#2\empty
\zerocount
\else\ifx#2\forceformulanumberflag
\plusone
@@ -646,7 +659,7 @@
\plustwo
\else
\plusthree
- \fi\fi\fi}
+ \fi\fi\fi\fi}
\def\formulanumber{\doformulanumber} % for the moment
@@ -769,12 +782,10 @@
\setformulalistentry{#2}%
\next}
-\global\let\doflushformulalistentry\gobbleoneargument
+\glet\namedformulaentry\relax % this will become a key/value so that we can do bookmarks
\def\setformulalistentry#1%
- {\gdef\doflushformulalistentry##1%
- {\normalexpanded{\noexpand\writetolist[\v!formula]{##1}}{#1}%
- \global\let\doflushformulalistentry\gobbleoneargument}}
+ {\gdef\namedformulaentry{#1}}
\protect \endinput
diff --git a/tex/context/base/strc-not.lua b/tex/context/base/strc-not.lua
index 94a252539..123a03807 100644
--- a/tex/context/base/strc-not.lua
+++ b/tex/context/base/strc-not.lua
@@ -17,9 +17,6 @@ local trace_notes = false trackers.register("structures.notes", function(v) tra
local report_notes = logs.new("notes")
local structures = structures
-
-structures.notes = structures.notes or { }
-
local helpers = structures.helpers
local lists = structures.lists
local sections = structures.sections
@@ -169,7 +166,7 @@ local function internal(tag,n)
if r then
local i = r.internal
--~ return i and lists.internals[i]
- return i and references.internals[i]
+ return i and references.internals[i] -- dependency on references
end
end
return nil
@@ -366,3 +363,11 @@ end
function notes.number(tag,n,spec)
lists.savedprefixednumber(tag,notedata[tag][n])
end
+
+function notes.internalid(tag,n)
+ local nd = get(tag,n)
+ if nd then
+ local r = nd.references
+ return r.internal
+ end
+end
diff --git a/tex/context/base/strc-not.mkiv b/tex/context/base/strc-not.mkiv
index 55006684f..361d7dfb0 100644
--- a/tex/context/base/strc-not.mkiv
+++ b/tex/context/base/strc-not.mkiv
@@ -647,11 +647,16 @@
\def\dolastnotesymbol
{\typesetsomenotesymbol\currentnote\currentnotenumber\currentdescriptionnumberentry}
+\let\dotagsetnotesymbol\relax
+
\def\dotypesetsomenotesymbol#1#2#3% running text (messy: #1 and current mixed)
{\dodonotesymbol
{\synchronizesomenotesymbol{#1}{#2}{#3}%
% \ctxlua{structures.notes.number("\currentnote",\currentnotenumber)}% \currentdescriptionnumberentry
+ \dostarttagged\t!descriptionsymbol\currentnote
+ \dotagsetnotesymbol
\ctxlua{structures.notes.number("#1",#2)}%
+ \dostoptagged
\domovednote{#1}{#2}\v!previouspage\v!nextpage}}
\unexpanded\def\typesetsomenotesymbol#1#2#3% running text
diff --git a/tex/context/base/strc-num.lua b/tex/context/base/strc-num.lua
index 6f1efec55..1b26a4a7a 100644
--- a/tex/context/base/strc-num.lua
+++ b/tex/context/base/strc-num.lua
@@ -10,6 +10,7 @@ local format = string.format
local next, type = next, type
local min, max = math.min, math.max
local texsprint, texcount = tex.sprint, tex.count
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local trace_counters = false trackers.register("structures.counters", function(v) trace_counters = v end)
@@ -17,32 +18,38 @@ local report_counters = logs.new("counters")
local structures = structures
-local helpers = structures.helpers
-local sections = structures.sections
-local counters = structures.counters
-local documents = structures.documents
+local helpers = structures.helpers
+local sections = structures.sections
+local counters = structures.counters
+local documents = structures.documents
-local variables = interfaces.variables
-
-counters.data = counters.data or { }
-counters.specials = counters.specials or { }
+local variables = interfaces.variables
-- state: start stop none reset
-local counterdata = counters.data
+counters.specials = counters.specials or { }
+local counterspecials = counters.specials
+
local counterranges, tbs = { }, 0
-local counterspecials = counters.specials
-counters.collected = counters.collected or { }
+counters.collected = allocate()
counters.tobesaved = counters.tobesaved or { }
+counters.data = counters.data or { }
storage.register("structures/counters/data", counters.data, "structures.counters.data")
storage.register("structures/counters/tobesaved", counters.tobesaved, "structures.counters.tobesaved")
-local collected, tobesaved = counters.collected, counters.tobesaved
+local collected = counters.collected
+local tobesaved = counters.tobesaved
+local counterdata = counters.data
+
+local function initializer() -- not really needed
+ collected = counters.collected
+ tobesaved = counters.tobesaved
+ counterdata = counters.data
+end
local function finalizer()
- local ct = counters.tobesaved
for name, cd in next, counterdata do
local cs = tobesaved[name]
local data = cd.data
@@ -55,13 +62,7 @@ local function finalizer()
end
end
-local function initializer()
- collected, tobesaved = counters.collected, counters.tobesaved
-end
-
-if job then
- job.register('structures.counters.collected', counters.tobesaved, initializer, finalizer)
-end
+job.register('structures.counters.collected', tobesaved, initializer, finalizer)
local function constructor(t,s,name,i)
if s == "last" then
@@ -267,7 +268,6 @@ function counters.setoffset(name,value)
counters.setvalue(name,"offset",value)
end
-
local function synchronize(name,d)
local dc = d.counter
if dc then
@@ -465,54 +465,54 @@ function counters.converted(name,spec) -- name can be number and reference to st
end
end
--- move to strc-pag.lua
-
-function counters.analyse(name,counterspecification)
- local cd = counterdata[name]
- -- safeguard
- if not cd then
- return false, false, "no counter data"
- end
- -- section data
- local sectiondata = sections.current()
- if not sectiondata then
- return cd, false, "not in section"
- end
- local references = sectiondata.references
- if not references then
- return cd, false, "no references"
- end
- local section = references.section
- if not section then
- return cd, false, "no section"
- end
- sectiondata = sections.collected[references.section]
- if not sectiondata then
- return cd, false, "no section data"
- end
- -- local preferences
- local no = variables.no
- if counterspecification and counterspecification.prefix == no then
- return cd, false, "current spec blocks prefix"
- end
- -- stored preferences (not used)
- if cd.prefix == no then
- return cd, false, "entry blocks prefix"
- end
- -- sectioning
- -- if sectiondata.prefix == no then
- -- return false, false, "sectiondata blocks prefix"
- -- end
- -- final verdict
- return cd, sectiondata, "okay"
-end
-
-function counters.prefixedconverted(name,prefixspec,numberspec)
- local cd, prefixdata, result = counters.analyse(name,prefixspec)
- if cd then
- if prefixdata then
- sections.typesetnumber(prefixdata,"prefix",prefixspec or false,cd or false)
- end
- counters.converted(name,numberspec)
- end
-end
+--~ -- move to strc-pag.lua
+
+--~ function counters.analyze(name,counterspecification)
+--~ local cd = counterdata[name]
+--~ -- safeguard
+--~ if not cd then
+--~ return false, false, "no counter data"
+--~ end
+--~ -- section data
+--~ local sectiondata = sections.current()
+--~ if not sectiondata then
+--~ return cd, false, "not in section"
+--~ end
+--~ local references = sectiondata.references
+--~ if not references then
+--~ return cd, false, "no references"
+--~ end
+--~ local section = references.section
+--~ if not section then
+--~ return cd, false, "no section"
+--~ end
+--~ sectiondata = sections.collected[references.section]
+--~ if not sectiondata then
+--~ return cd, false, "no section data"
+--~ end
+--~ -- local preferences
+--~ local no = variables.no
+--~ if counterspecification and counterspecification.prefix == no then
+--~ return cd, false, "current spec blocks prefix"
+--~ end
+--~ -- stored preferences (not used)
+--~ if cd.prefix == no then
+--~ return cd, false, "entry blocks prefix"
+--~ end
+--~ -- sectioning
+--~ -- if sectiondata.prefix == no then
+--~ -- return false, false, "sectiondata blocks prefix"
+--~ -- end
+--~ -- final verdict
+--~ return cd, sectiondata, "okay"
+--~ end
+
+--~ function counters.prefixedconverted(name,prefixspec,numberspec)
+--~ local cd, prefixdata, result = counters.analyze(name,prefixspec)
+--~ if cd then
+--~ if prefixdata then
+--~ sections.typesetnumber(prefixdata,"prefix",prefixspec or false,cd or false)
+--~ end
+--~ counters.converted(name,numberspec)
+--~ end
+--~ end
diff --git a/tex/context/base/strc-pag.lua b/tex/context/base/strc-pag.lua
index e7750815e..fde2de942 100644
--- a/tex/context/base/strc-pag.lua
+++ b/tex/context/base/strc-pag.lua
@@ -10,36 +10,38 @@ local texcount, format = tex.count, string.format
local ctxcatcodes = tex.ctxcatcodes
local texsprint, texwrite = tex.sprint, tex.write
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local trace_pages = false trackers.register("structures.pages", function(v) trace_pages = v end)
local report_pages = logs.new("pages")
-local structures = structures
+local structures = structures
-structures.pages = structures.pages or { }
+local helpers = structures.helpers
+local sections = structures.sections
+local pages = structures.pages
+local processors = structures.processors
+local sets = structures.sets
+local counters = structures.counters
-local helpers = structures.helpers or { }
-local sections = structures.sections or { }
-local pages = structures.pages or { }
-local processors = structures.processors or { }
-local sets = structures.sets or { }
-local counters = structures.counters or { }
+local counterdata = counters.data
-local variables = interfaces.variables
+local variables = interfaces.variables
-- storage
-pages.collected = pages.collected or { }
-pages.tobesaved = pages.tobesaved or { }
+local collected, tobesaved = allocate(), allocate()
-local collected, tobesaved = pages.collected, pages.tobesaved
+pages.collected = collected
+pages.tobesaved = tobesaved
local function initializer()
- collected, tobesaved = pages.collected, pages.tobesaved
+ collected = mark(pages.collected)
+ tobesaved = mark(pages.tobesaved)
end
-job.register('structures.pages.collected', pages.tobesaved, initializer)
+job.register('structures.pages.collected', tobesaved, initializer)
local specification = { } -- to be checked
@@ -77,23 +79,6 @@ function counters.specials.userpage()
end
end
---~ function pages.pagenumber(localspec)
---~ local deltaspec
---~ if localspec then
---~ for k,v in next, localspec do
---~ if v ~= "" and v ~= specification[k] then
---~ if not deltaspec then deltaspec = { } end
---~ deltaspec[k] = v
---~ end
---~ end
---~ end
---~ if deltaspec then
---~ return { realpage = texcount.realpageno, specification = deltaspec }
---~ else
---~ return { realpage = texcount.realpageno }
---~ end
---~ end
-
local function convertnumber(str,n)
return format("\\convertnumber{%s}{%s}",str or "numbers",n)
end
@@ -122,7 +107,7 @@ end
-- (pagespec.prefix == yes|unset) and (pages.prefix == yes) => prefix
-function pages.analyse(entry,pagespecification)
+function pages.analyze(entry,pagespecification)
-- safeguard
if not entry then
return false, false, "no entry"
@@ -163,7 +148,7 @@ end
function helpers.page(data,pagespec)
if data then
- local pagedata = pages.analyse(data,pagespec)
+ local pagedata = pages.analyze(data,pagespec)
if pagedata then
pages.number(pagedata,pagespec)
end
@@ -172,7 +157,7 @@ end
function helpers.prefixpage(data,prefixspec,pagespec)
if data then
- local pagedata, prefixdata, e = pages.analyse(data,pagespec)
+ local pagedata, prefixdata, e = pages.analyze(data,pagespec)
if pagedata then
if prefixdata then
sections.typesetnumber(prefixdata,"prefix",prefixspec or false,prefixdata or false,pagedata.prefixdata or false)
@@ -194,7 +179,7 @@ end
--
-function helpers.analyse(entry,specification)
+function helpers.analyze(entry,specification)
-- safeguard
if not entry then
return false, false, "no entry"
@@ -228,7 +213,7 @@ end
function helpers.prefix(data,prefixspec)
if data then
- local _, prefixdata, status = helpers.analyse(data,prefixspec)
+ local _, prefixdata, status = helpers.analyze(data,prefixspec)
if prefixdata then
sections.typesetnumber(prefixdata,"prefix",prefixspec or false,data.prefixdata or false,prefixdata or false)
end
@@ -243,3 +228,55 @@ function pages.is_odd(n)
return n % 2 ~= 0
end
end
+
+-- move to strc-pag.lua
+
+function counters.analyze(name,counterspecification)
+ local cd = counterdata[name]
+ -- safeguard
+ if not cd then
+ return false, false, "no counter data"
+ end
+ -- section data
+ local sectiondata = sections.current()
+ if not sectiondata then
+ return cd, false, "not in section"
+ end
+ local references = sectiondata.references
+ if not references then
+ return cd, false, "no references"
+ end
+ local section = references.section
+ if not section then
+ return cd, false, "no section"
+ end
+ sectiondata = sections.collected[references.section]
+ if not sectiondata then
+ return cd, false, "no section data"
+ end
+ -- local preferences
+ local no = variables.no
+ if counterspecification and counterspecification.prefix == no then
+ return cd, false, "current spec blocks prefix"
+ end
+ -- stored preferences (not used)
+ if cd.prefix == no then
+ return cd, false, "entry blocks prefix"
+ end
+ -- sectioning
+ -- if sectiondata.prefix == no then
+ -- return false, false, "sectiondata blocks prefix"
+ -- end
+ -- final verdict
+ return cd, sectiondata, "okay"
+end
+
+function counters.prefixedconverted(name,prefixspec,numberspec)
+ local cd, prefixdata, result = counters.analyze(name,prefixspec)
+ if cd then
+ if prefixdata then
+ sections.typesetnumber(prefixdata,"prefix",prefixspec or false,cd or false)
+ end
+ counters.converted(name,numberspec)
+ end
+end
diff --git a/tex/context/base/strc-pag.mkiv b/tex/context/base/strc-pag.mkiv
index 641acc9dc..4b76dced6 100644
--- a/tex/context/base/strc-pag.mkiv
+++ b/tex/context/base/strc-pag.mkiv
@@ -17,10 +17,6 @@
\unprotect
-% Hacks:
-
-\let\preparepageprefix\gobbleoneargument
-
% Allocation:
\countdef\realpageno = 0 \realpageno = 1
@@ -40,7 +36,7 @@
\def\initializepagecounters{\the\everyinitializepagecounters}
\appendtoks
- \initializepagecounters
+ \initializepagecounters
\to \everyjob
% Page numbers are kind of independent of each other and therefore they
diff --git a/tex/context/base/strc-ref.lua b/tex/context/base/strc-ref.lua
index 398afefba..1d7a94c44 100644
--- a/tex/context/base/strc-ref.lua
+++ b/tex/context/base/strc-ref.lua
@@ -9,6 +9,9 @@ if not modules then modules = { } end modules ['strc-ref'] = {
local format, find, gmatch, match, concat = string.format, string.find, string.gmatch, string.match, table.concat
local lpegmatch = lpeg.match
local texsprint, texwrite, texcount, texsetcount = tex.sprint, tex.write, tex.count, tex.setcount
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+
+local allocate = utilities.storage.allocate
local trace_referencing = false trackers.register("structures.referencing", function(v) trace_referencing = v end)
@@ -19,6 +22,7 @@ local variables = interfaces.variables
local constants = interfaces.constants
local settings_to_array = utilities.parsers.settings_to_array
+local unsetvalue = attributes.unsetvalue
-- beware, this is a first step in the rewrite (just getting rid of
-- the tuo file); later all access and parsing will also move to lua
@@ -27,35 +31,43 @@ local settings_to_array = utilities.parsers.settings_to_array
-- todo: pack exported data
local structures = structures
-
-structures.references = structures.references or { }
-
local helpers = structures.helpers
local sections = structures.sections
local references = structures.references
local lists = structures.lists
local counters = structures.counters
-references.tobesaved = references.tobesaved or { }
-references.collected = references.collected or { }
-
-references.defined = references.defined or { } -- indirect ones
-references.derived = references.derived or { } -- taken from lists
-references.specials = references.specials or { } -- system references
-references.runners = references.runners or { }
-references.internals = references.internals or { }
-references.exporters = references.exporters or { }
-references.imported = references.imported or { }
-references.filters = references.filters or { }
-
-local filters = references.filters
+-- some might become local
+
+references.defined = references.defined or allocate()
+
+local defined = references.defined
+local derived = allocate()
+local specials = { } -- allocate()
+local runners = { } -- allocate()
+local internals = allocate()
+local exporters = allocate()
+local imported = allocate()
+local filters = allocate()
+local executers = allocate()
+local handlers = allocate()
+local tobesaved = allocate()
+local collected = allocate()
+
+references.derived = derived
+references.specials = specials
+references.runners = runners
+references.internals = internals
+references.exporters = exporters
+references.imported = imported
+references.filters = filters
+references.executers = executers
+references.handlers = handlers
+references.tobesaved = tobesaved
+references.collected = collected
storage.register("structures/references/defined", references.defined, "structures.references.defined")
-local tobesaved, collected = references.tobesaved, references.collected
-local defined, derived, specials = references.defined, references.derived, references.specials
-local exporters, runners = references.exporters, references.runners
-
local currentreference = nil
local initializers = { }
@@ -69,20 +81,21 @@ function references.registerfinalizer(func) -- we could use a token register ins
end
local function initializer()
- tobesaved, collected = references.tobesaved, references.collected
+ tobesaved = mark(references.tobesaved)
+ collected = mark(references.collected)
for i=1,#initializers do
initializers[i](tobesaved,collected)
end
end
local function finalizer()
- tobesaved = references.tobesaved
+ -- tobesaved = mark(references.tobesaved)
for i=1,#finalizers do
finalizers[i](tobesaved)
end
end
if job then
- job.register('structures.references.collected', references.tobesaved, initializer, finalizer)
+ job.register('structures.references.collected', tobesaved, initializer, finalizer)
end
-- todo: delay split till later as in destinations we split anyway
@@ -177,7 +190,7 @@ local special_reference = special * lparent * (operation * optional_arguments +
local scanner = (reset * outer_reference * (special_reference + inner_reference)^-1 * -1) / function() return result end
---~ function references.analyse(str) -- overloaded
+--~ function references.analyze(str) -- overloaded
--~ return lpegmatch(scanner,str)
--~ end
@@ -185,19 +198,19 @@ function references.split(str)
return lpegmatch(scanner,str or "")
end
---~ print(table.serialize(references.analyse("")))
---~ print(table.serialize(references.analyse("inner")))
---~ print(table.serialize(references.analyse("special(operation{argument,argument})")))
---~ print(table.serialize(references.analyse("special(operation)")))
---~ print(table.serialize(references.analyse("special()")))
---~ print(table.serialize(references.analyse("inner{argument}")))
---~ print(table.serialize(references.analyse("outer::")))
---~ print(table.serialize(references.analyse("outer::inner")))
---~ print(table.serialize(references.analyse("outer::special(operation{argument,argument})")))
---~ print(table.serialize(references.analyse("outer::special(operation)")))
---~ print(table.serialize(references.analyse("outer::special()")))
---~ print(table.serialize(references.analyse("outer::inner{argument}")))
---~ print(table.serialize(references.analyse("special(outer::operation)")))
+--~ print(table.serialize(references.analyze("")))
+--~ print(table.serialize(references.analyze("inner")))
+--~ print(table.serialize(references.analyze("special(operation{argument,argument})")))
+--~ print(table.serialize(references.analyze("special(operation)")))
+--~ print(table.serialize(references.analyze("special()")))
+--~ print(table.serialize(references.analyze("inner{argument}")))
+--~ print(table.serialize(references.analyze("outer::")))
+--~ print(table.serialize(references.analyze("outer::inner")))
+--~ print(table.serialize(references.analyze("outer::special(operation{argument,argument})")))
+--~ print(table.serialize(references.analyze("outer::special(operation)")))
+--~ print(table.serialize(references.analyze("outer::special()")))
+--~ print(table.serialize(references.analyze("outer::inner{argument}")))
+--~ print(table.serialize(references.analyze("special(outer::operation)")))
-- -- -- related to strc-ini.lua -- -- --
@@ -498,6 +511,9 @@ local function referencer(data)
}
end
+-- Exported and imported references ... not yet used but don't forget it
+-- and redo it.
+
function references.export(usedname)
local exported = { }
local e_references, e_lists = exporters.references, exporters.lists
@@ -550,7 +566,6 @@ end
function references.import(usedname)
if usedname then
- local imported = references.imported
local jdn = imported[usedname]
if not jdn then
local filename = files[usedname]
@@ -620,7 +635,12 @@ end
local function resolve(prefix,reference,args,set) -- we start with prefix,reference
texcount.referencehastexstate = 0
if reference and reference ~= "" then
- set = set or { }
+ if not set then
+ set = { prefix = prefix, reference = reference }
+ else
+ set.reference = set.reference or reference
+ set.prefix = set.prefix or prefix
+ end
local r = settings_to_array(reference)
for i=1,#r do
local ri = r[i]
@@ -665,7 +685,7 @@ end
references.currentset = nil
-local b, e = "\\ctxlua{local jc = references.currentset;", "}"
+local b, e = "\\ctxlua{local jc = structures.references.currentset;", "}"
local o, a = 'jc[%s].operation=[[%s]];', 'jc[%s].arguments=[[%s]];'
function references.expandcurrent() -- todo: two booleans: o_has_tex& a_has_tex
@@ -934,26 +954,43 @@ function references.doifelse(prefix,reference,highlight,newwindow,layer)
commands.doifelse(not unknown)
end
+local innermethod = "names"
+
+function references.setinnermethod(m)
+ innermethod = m -- page names mixed
+ function references.setinnermethod()
+ report_references("inner method is already set and frozen to '%s'",innermethod)
+ end
+end
+
+function references.getinnermethod()
+ return innermethod or "names"
+end
+
function references.setinternalreference(prefix,tag,internal,view)
- local t = { } -- maybe add to current
- if tag then
- if prefix and prefix ~= "" then
- prefix = prefix .. ":"
- for ref in gmatch(tag,"[^,]+") do
- t[#t+1] = prefix .. ref
- end
- else
- for ref in gmatch(tag,"[^,]+") do
- t[#t+1] = ref
+ if innermethod == "page" then
+ return unsetvalue
+ else
+ local t = { } -- maybe add to current
+ if tag then
+ if prefix and prefix ~= "" then
+ prefix = prefix .. ":"
+ for ref in gmatch(tag,"[^,]+") do
+ t[#t+1] = prefix .. ref
+ end
+ else
+ for ref in gmatch(tag,"[^,]+") do
+ t[#t+1] = ref
+ end
end
end
+ if internal and innermethod == "names" then -- mixed or page
+ t[#t+1] = "aut:" .. internal
+ end
+ local destination = references.mark(t,nil,nil,view) -- returns an attribute
+ texcount.lastdestinationattribute = destination
+ return destination
end
- if internal then
- t[#t+1] = "aut:" .. internal
- end
- local destination = references.mark(t,nil,nil,view) -- returns an attribute
- texcount.lastdestinationattribute = destination
- return destination
end
function references.getinternalreference(n) -- n points into list (todo: registers)
@@ -1139,7 +1176,7 @@ function references.sectionpage(n,prefixspec,pagespec)
helpers.prefixedpage(lists.collected[tonumber(n) or 0],prefixspec,pagespec)
end
--- analyse
+-- analyze
references.testrunners = references.testrunners or { }
references.testspecials = references.testspecials or { }
@@ -1147,15 +1184,15 @@ references.testspecials = references.testspecials or { }
local runners = references.testrunners
local specials = references.testspecials
-function references.analyse(actions)
+function references.analyze(actions)
actions = actions or references.currentset
if not actions then
actions = { realpage = 0 }
elseif actions.realpage then
- -- already analysed
+ -- already analyzed
else
-- we store some analysis data alongside the indexed array
- -- at this moment only the real reference page is analysed
+ -- at this moment only the real reference page is analyzed
-- normally such an analysis happens in the backend code
texcount.referencepagestate = 0
local nofactions = #actions
@@ -1183,13 +1220,13 @@ function references.analyse(actions)
end
function references.realpage() -- special case, we always want result
- local cs = references.analyse()
+ local cs = references.analyze()
texwrite(cs.realpage or 0)
end
--
-references.pages = {
+references.pages = allocate {
[variables.firstpage] = function() return counters.record("realpage")["first"] end,
[variables.previouspage] = function() return counters.record("realpage")["previous"] end,
[variables.nextpage] = function() return counters.record("realpage")["next"] end,
diff --git a/tex/context/base/strc-ref.mkiv b/tex/context/base/strc-ref.mkiv
index d5dadc63c..214303edf 100644
--- a/tex/context/base/strc-ref.mkiv
+++ b/tex/context/base/strc-ref.mkiv
@@ -465,7 +465,7 @@
%D be called.
\def\analyzecurrentreference
- {\ctxlua{structures.references.analyse()}}
+ {\ctxlua{structures.references.analyze()}}
%D The inner case is simple. Only two cases have to be taken
%D care of:
@@ -578,7 +578,7 @@
\newcount\locationorder
\newbox \locationbox
-\def\nextinternalreference {\the\locationcount}
+\def\nextinternalreference {\the\locationcount}
\def\nextinternalorderreference{\the\locationorder}
\def\setnextinternalreference
@@ -596,11 +596,13 @@
\hbox attr \destinationattribute\lastdestinationattribute{}%
\endgroup}
+\newconditional\preferpagereferences
+
\def\gotosomeinternal#1#2#3#4%
- {\ifinternalnamedreferences
- \directgoto{#4}[#1:#2]%
- \else
+ {\ifconditional\preferpagereferences
\directgoto{#4}[page(#3)]%
+ \else
+ \directgoto{#4}[#1:#2]%
\fi}
\def\gotonextinternal#1#2%
@@ -859,12 +861,12 @@
% Yet untested:
\unexpanded\def\somewhere#1#2#3[#4]% #3 gobbles space around #2 % todo
- {\doifreferencefoundelse{#4}% kind of inefficient as \goto also analyses
+ {\doifreferencefoundelse{#4}% kind of inefficient as \goto also analyzes
{\goto{\analyzecurrentreference\ifcase\referencepagestate\relax#1/#2\or#2\or#1\or#2\fi}[#4]}
{[#1/#2]}}
\unexpanded\def\atpage[#1]% todo
- {\doifreferencefoundelse{#1}% kind of inefficient as \goto also analyses
+ {\doifreferencefoundelse{#1}% kind of inefficient as \goto also analyzes
{\goto{\analyzecurrentreference\ifcase\referencepagestate
\labeltexts\v!page\dummyreference
\or
diff --git a/tex/context/base/strc-reg.lua b/tex/context/base/strc-reg.lua
index 2b6b6e30c..1e9781b38 100644
--- a/tex/context/base/strc-reg.lua
+++ b/tex/context/base/strc-reg.lua
@@ -12,15 +12,13 @@ local format, gmatch, concat = string.format, string.gmatch, table.concat
local utfchar = utf.char
local lpegmatch = lpeg.match
local ctxcatcodes = tex.ctxcatcodes
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local trace_registers = false trackers.register("structures.registers", function(v) trace_registers = v end)
local report_registers = logs.new("registers")
local structures = structures
-
-structures.registers = structures.registers or { }
-
local registers = structures.registers
local helpers = structures.helpers
local sections = structures.sections
@@ -42,7 +40,7 @@ local matchingtilldepth, numberatdepth = sections.matchingtilldepth, sections.nu
-- some day we will share registers and lists (although there are some conceptual
-- differences in the application of keywords)
-local function filter_collected(names,criterium,number,collected,prevmode)
+local function filtercollected(names,criterium,number,collected,prevmode)
if not criterium or criterium == "" then criterium = variables.all end
local data = documents.data
local numbers, depth = data.numbers, data.depth
@@ -132,9 +130,9 @@ local function filter_collected(names,criterium,number,collected,prevmode)
end
elseif criterium == variables["local"] then
if sections.autodepth(data.numbers) == 0 then
- return filter_collected(names,variables.all,number,collected,prevmode)
+ return filtercollected(names,variables.all,number,collected,prevmode)
else
- return filter_collected(names,variables.current,number,collected,prevmode)
+ return filtercollected(names,variables.current,number,collected,prevmode)
end
else -- sectionname, number
-- beware, this works ok for registers
@@ -172,19 +170,20 @@ local function filter_collected(names,criterium,number,collected,prevmode)
return result
end
-registers.collected = registers.collected or { }
-registers.tobesaved = registers.tobesaved or { }
+local tobesaved, collected = allocate(), allocate()
-registers.filter_collected = filter_collected
+registers.collected = collected
+registers.tobesaved = tobesaved
+
+registers.filtercollected = filtercollected
-- we follow a different strategy than by lists, where we have a global
-- result table; we might do that here as well but since sorting code is
-- older we delay that decision
-local tobesaved, collected = registers.tobesaved, registers.collected
-
local function initializer()
- tobesaved, collected = registers.tobesaved, registers.collected
+ tobesaved = mark(registers.tobesaved)
+ collected = mark(registers.collected)
local internals = references.internals
for name, list in next, collected do
local entries = list.entries
@@ -201,7 +200,7 @@ local function initializer()
end
end
-job.register('structures.registers.collected', registers.tobesaved, initializer)
+job.register('structures.registers.collected', tobesaved, initializer)
local function allocate(class)
local d = tobesaved[class]
@@ -332,7 +331,7 @@ function registers.compare(a,b)
end
function registers.filter(data,options)
- data.result = registers.filter_collected(nil,options.criterium,options.number,data.entries,true)
+ data.result = registers.filtercollected(nil,options.criterium,options.number,data.entries,true)
end
function registers.prepare(data)
@@ -415,7 +414,7 @@ function registers.finalize(data,options)
data.result = split
end
-function registers.analysed(class,options)
+function registers.analyzed(class,options)
local data = collected[class]
if data and data.entries then
options = options or { }
@@ -446,6 +445,13 @@ end
function registers.flush(data,options,prefixspec,pagespec)
local equal = table.are_equal
+ -- local usedtags = { }
+ -- for i=1,#result do
+ -- usedtags[#usedtags+1] = result[i].tag
+ -- end
+ --
+ -- texsprint(ctxcatcodes,"\\def\\usedregistertags{",concat(usedtags,","),"}") -- todo: { } and escape special chars
+ --
texsprint(ctxcatcodes,"\\startregisteroutput")
local collapse_singles = options.compress == interfaces.variables.yes
local collapse_ranges = options.compress == interfaces.variables.all
@@ -453,8 +459,8 @@ function registers.flush(data,options,prefixspec,pagespec)
-- todo ownnumber
local function pagenumber(entry)
local er = entry.references
- texsprint(ctxcatcodes,format("\\registeronepage{%s}{%s}{",er.internal or 0,er.realpage or 0)) -- internal realpage content
local proc = entry.processors and entry.processors[2]
+ texsprint(ctxcatcodes,"\\registeronepage{",er.internal or 0,"}{",er.realpage or 0,"}{") -- internal realpage content
if proc then
texsprint(ctxcatcodes,"\\applyprocessor{",proc,"}{")
helpers.prefixpage(entry,prefixspec,pagespec)
@@ -466,8 +472,8 @@ function registers.flush(data,options,prefixspec,pagespec)
end
local function pagerange(f_entry,t_entry,is_last)
local er = f_entry.references
- texsprint(ctxcatcodes,format("\\registerpagerange{%s}{%s}{",er.internal or 0,er.realpage or 0))
local proc = f_entry.processors and f_entry.processors[2]
+ texsprint(ctxcatcodes,"\\registerpagerange{",er.internal or 0,"}{",er.realpage or 0,"}{")
if proc then
texsprint(ctxcatcodes,"\\applyprocessor{",proc,"}{")
helpers.prefixpage(f_entry,prefixspec,pagespec)
@@ -476,7 +482,7 @@ function registers.flush(data,options,prefixspec,pagespec)
helpers.prefixpage(f_entry,prefixspec,pagespec)
end
local er = t_entry.references
- texsprint(ctxcatcodes,format("}{%s}{%s}{",er.internal or 0,er.lastrealpage or er.realpage or 0))
+ texsprint(ctxcatcodes,"}{",er.internal or 0,"}{",er.lastrealpage or er.realpage or 0,"}{")
if is_last then
if proc then
texsprint(ctxcatcodes,"\\applyprocessor{",proc,"}{")
@@ -502,7 +508,7 @@ function registers.flush(data,options,prefixspec,pagespec)
local done = { false, false, false, false }
local data = sublist.data
local d, n = 0, 0
- texsprint(ctxcatcodes,format("\\startregistersection{%s}",sublist.tag))
+ texsprint(ctxcatcodes,"\\startregistersection{",sublist.tag,"}")
while d < #data do
d = d + 1
local entry = data[d]
@@ -516,7 +522,7 @@ function registers.flush(data,options,prefixspec,pagespec)
if e[i] and e[i] ~= "" then
done[i] = e[i]
if n == i then
- texsprint(ctxcatcodes,format("\\stopregisterentries\\startregisterentries{%s}",n))
+ texsprint(ctxcatcodes,"\\stopregisterentries\\startregisterentries{",n,"}")
else
while n > i do
n = n - 1
@@ -524,11 +530,12 @@ function registers.flush(data,options,prefixspec,pagespec)
end
while n < i do
n = n + 1
- texsprint(ctxcatcodes,format("\\startregisterentries{%s}",n))
+ texsprint(ctxcatcodes,"\\startregisterentries{",n,"}")
end
end
+ local internal = entry.references.internal
if metadata then
- texsprint(ctxcatcodes,"\\registerentry{")
+ texsprint(ctxcatcodes,"\\registerentry{",internal,"}{")
local proc = entry.processors and entry.processors[1]
if proc then
texsprint(ctxcatcodes,"\\applyprocessor{",proc,"}{")
@@ -541,11 +548,9 @@ function registers.flush(data,options,prefixspec,pagespec)
else
local proc = entry.processors and entry.processors[1]
if proc then
- texsprint(ctxcatcodes,"\\applyprocessor{",proc,"}{")
- texsprint(ctxcatcodes,format("\\registerentry{%s}",e[i]))
- texsprint(ctxcatcodes,"}")
+ texsprint(ctxcatcodes,"\\applyprocessor{",proc,"}{\\registerentry{",internal,"}{",e[i],"}}")
else
- texsprint(ctxcatcodes,format("\\registerentry{%s}",e[i]))
+ texsprint(ctxcatcodes,"\\registerentry{",internal,"}{",e[i],"}")
end
end
else
@@ -709,11 +714,9 @@ function registers.flush(data,options,prefixspec,pagespec)
texsprint(ctxcatcodes,"\\startregisterseewords")
local proc = entry.processors and entry.processors[1]
if proc then
- texsprint(ctxcatcodes,"\\applyprocessor{",proc,"}{")
- texsprint(ctxcatcodes,format("\\registeroneword{0}{0}{%s}",entry.seeword.text)) -- todo: internal
- texsprint(ctxcatcodes,"}")
+ texsprint(ctxcatcodes,"\\applyprocessor{",proc,"}{\\registeroneword{0}{0}{",entry.seeword.text,"}}") -- todo: internal
else
- texsprint(ctxcatcodes,format("\\registeroneword{0}{0}{%s}",entry.seeword.text)) -- todo: internal
+ texsprint(ctxcatcodes,"\\registeroneword{0}{0}{",entry.seeword.text,"}") -- todo: internal
end
texsprint(ctxcatcodes,"\\stopregisterseewords")
end
@@ -730,12 +733,13 @@ function registers.flush(data,options,prefixspec,pagespec)
data.metadata.sorted = false
end
-function registers.analyse(class,options)
- texwrite(registers.analysed(class,options))
+function registers.analyze(class,options)
+ texwrite(registers.analyzed(class,options))
end
function registers.process(class,...)
- if registers.analysed(class,...) > 0 then
+ if registers.analyzed(class,...) > 0 then
registers.flush(collected[class],...)
end
end
+
diff --git a/tex/context/base/strc-reg.mkiv b/tex/context/base/strc-reg.mkiv
index 4c1c4be96..06c719a97 100644
--- a/tex/context/base/strc-reg.mkiv
+++ b/tex/context/base/strc-reg.mkiv
@@ -117,6 +117,7 @@
\c!prefix=\v!both,
%\c!expansion=,
%\c!xmlsetup=,
+ \c!pagenumber=\v!yes,
\c!pageprefixconnector=\endash,
\c!pagesegments=2:2,
\c!file=\jobname,
@@ -451,7 +452,7 @@
{\begingroup
\edef\currentregister{#1}%
\setupregister[\currentregister][#2]%
- \normalexpanded{\endgroup\noexpand\xdef\noexpand\utilityregisterlength{\ctxlua{structures.registers.analyse('\currentregister',{
+ \normalexpanded{\endgroup\noexpand\xdef\noexpand\utilityregisterlength{\ctxlua{structures.registers.analyze('\currentregister',{
language = "\registerparameter\s!language",
compress = "\registerparameter\c!compress",
criterium = "\registerparameter\c!criterium",
@@ -563,9 +564,26 @@
{\doifsomething{#1}
{\doifnot{#1}\s!unknown
{\doifelse{\registerparameter\c!indicator}\v!yes
- {\expandcheckedcsname{\??id:\c!indicator:}{\registerparameter\c!alternative}{a}{#1}}
+ {\doregistercharacter{#1}}
{\noregistercharacter{#1}}}}}
+% \def\doregistercharacter#1%
+% {\expandcheckedcsname % why no \executeifdefined
+% {\??id:\c!indicator:}%
+% {\registerparameter\c!alternative}%
+% {a}%
+% {#1}}
+
+\def\doregistercharacter#1%
+ {\iflocation
+ \pagereference[\currentregister:\v!section:#1]%
+ \fi
+ \expandcheckedcsname % why no \executeifdefined
+ {\??id:\c!indicator:}%
+ {\registerparameter\c!alternative}%
+ {a}%
+ {#1}}
+
\def\noregistercharacter#1%
{\registerparameter\c!before
\goodbreak}
@@ -701,11 +719,21 @@
\registerseeword{#3}%
\dostoptagged}
-\def\defaultregisterentry#1%
+\def\doapplyregisterentrycommand#1%
{\dostarttagged\t!registerentry\empty
\registerparameter\c!textcommand{\limitedregisterentry{\registerparameter\c!deeptextcommand{#1}}}%
\dostoptagged}
+\def\defaultregisterentry#1#2% #1 is pageref
+ {\def\currentregisterpageindex{#1}%
+ \iflocation
+ \doifelse{\registerparameter\c!interaction}\v!text
+ {\directgoto{\setlocationcolor\??ia\doapplyregisterentrycommand{#2}}[internal(#1)]}
+ {\doapplyregisterentrycommand{#2}}%
+ \else
+ \doapplyregisterentrycommand{#2}%
+ \fi}
+
\def\defaultregisterseeword#1%
{\dostarttagged\t!registersee\empty
\labeltexts\v!see{#1}%
@@ -760,7 +788,9 @@
\def\registerpagerange {\registerpagebuttonsymbol\gobblesixarguments}}
\def\setregisterpagerendering
- {\edef\currentregisterpagesymbol{\registerparameter\c!symbol}%
+ {\doifelse{\registerparameter\c!pagenumber}\v!no
+ {\let \currentregisterpagesymbol\v!none}
+ {\edef\currentregisterpagesymbol{\registerparameter\c!symbol}}%
\ifx\currentregisterpagesymbol\empty
\csname\??id:\c!symbol:\c!n\endcsname
\else\ifcsname\??id:\c!symbol:\currentregisterpagesymbol\endcsname
diff --git a/tex/context/base/strc-ren.mkiv b/tex/context/base/strc-ren.mkiv
index ea066569f..abec640ac 100644
--- a/tex/context/base/strc-ren.mkiv
+++ b/tex/context/base/strc-ren.mkiv
@@ -146,7 +146,7 @@
\let\currentstructurereferenceattribute\currentreferenceattribute
\else\ifx\currentstructureheadinteraction\v!reference
% setuphead[<section>][interaction=reference,...] start<section>[backreference=abc,...]
- \edef\currentstructureheadbackreference{\structurevariable{references.\c!backreference}}%
+ \edef\currentstructureheadbackreference{\structurevariable{references.backreference}}%
\ifx\currentstructureheadbackreference\empty \else
\dogetsimplepagereference\currentstructureheadbackreference
\let\currentstructurereferenceattribute\currentreferenceattribute
@@ -159,8 +159,7 @@
{\beginheadplacement
\doresettructureheadnumbercontent
\ifconditional\structureheadleaveempty
- \setbox\sectionheadbox\ifvertical\vbox\else\hbox\fi \structureheadattribute to \zeropoint
- {\dosetlocalgridsnapping{\structureheadparameter\c!internalgrid}#1}%
+ \setbox\sectionheadbox\ifvertical\vbox\else\hbox\fi \structureheadattribute to \zeropoint{#1}%
\makestrutofbox\sectionheadbox
\else
\docheckstructureheadreference
diff --git a/tex/context/base/strc-sec.mkiv b/tex/context/base/strc-sec.mkiv
index 1dcd7f26b..bf1399a31 100644
--- a/tex/context/base/strc-sec.mkiv
+++ b/tex/context/base/strc-sec.mkiv
@@ -370,7 +370,7 @@
\unexpanded\def\fullstructureheadnumber
{\edef\currentstructureheadlabeltag{\currentstructureblock\c!label}%
- \dostarttagged\t!structurenumber\empty
+ \dostarttagged\t!sectionnumber\empty
\labeltexts{\structureheadparameter\currentstructureheadlabeltag}{\structurenumber}%
\dostoptagged}
@@ -378,7 +378,7 @@
% \unexpanded\def\fullstructureheadtitle{\structureautocatcodedget{titledata.title}{\structureheadparameter\s!catcodes}}
\unexpanded\def\fullstructureheadtitle
- {\dostarttagged\t!structuretitle\empty
+ {\dostarttagged\t!sectiontitle\empty
\ctxlua{structures.sections.title()}%
\dostoptagged}
@@ -536,7 +536,7 @@
\flushingcolumnfloatstrue
\setfalse\ignorehandlepagebreak
% ignorespaces prevents spaces creeping in when after=\dontleavehmode
- \dostarttagged\t!structurecontent\empty
+ \dostarttagged\t!sectioncontent\empty
\ifconditional\structureheadisdisplay % \ifdisplaysectionhead
\ignorespaces
\else
@@ -589,7 +589,7 @@
{\docheckstructureheadbefore
\dohandlestructureheadpagebreak
\structureheadparameter\c!inbetween
- \dostarttagged\t!structure\currentstructurehead}
+ \dostarttagged\t!section\currentstructurehead}
\def\dostructureheadspacingbeforenop
{\docheckstructureheadbefore
diff --git a/tex/context/base/strc-syn.lua b/tex/context/base/strc-syn.lua
index fed5bf58c..005036843 100644
--- a/tex/context/base/strc-syn.lua
+++ b/tex/context/base/strc-syn.lua
@@ -8,23 +8,23 @@ if not modules then modules = { } end modules ['str-syn'] = {
local next, type = next, type
local texwrite, texsprint, format = tex.write, tex.sprint, string.format
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local ctxcatcodes = tex.ctxcatcodes
-- interface to tex end
local structures = structures
-
-structures.synonyms = structures.synonyms or { }
local synonyms = structures.synonyms
-synonyms.collected = synonyms.collected or { }
-synonyms.tobesaved = synonyms.tobesaved or { }
+local collected, tobesaved = allocate(), allocate()
-local collected, tobesaved = synonyms.collected, synonyms.tobesaved
+synonyms.collected = collected
+synonyms.tobesaved = tobesaved
local function initializer()
- collected, tobesaved = synonyms.collected, synonyms.tobesaved
+ collected = mark(synonyms.collected)
+ tobesaved = mark(synonyms.tobesaved)
end
local function finalizer()
@@ -33,7 +33,7 @@ local function finalizer()
end
end
-job.register('structures.synonyms.collected', synonyms.tobesaved, initializer, finalizer)
+job.register('structures.synonyms.collected', tobesaved, initializer, finalizer)
local function allocate(class)
local d = tobesaved[class]
@@ -171,7 +171,7 @@ function synonyms.flush(data,options) -- maybe pass the settings differently
data.metadata.sorted = false
end
-function synonyms.analysed(class,options)
+function synonyms.analyzed(class,options)
local data = synonyms.collected[class]
if data and data.entries then
options = options or { }
@@ -186,7 +186,7 @@ function synonyms.analysed(class,options)
end
function synonyms.process(class,options)
- if synonyms.analysed(class,options) then
+ if synonyms.analyzed(class,options) then
synonyms.flush(synonyms.collected[class],options)
end
end
diff --git a/tex/context/base/strc-tag.lua b/tex/context/base/strc-tag.lua
index 2df387e73..7701cc57e 100644
--- a/tex/context/base/strc-tag.lua
+++ b/tex/context/base/strc-tag.lua
@@ -11,7 +11,8 @@ if not modules then modules = { } end modules ['strc-tag'] = {
local insert, remove, unpack, concat = table.insert, table.remove, table.unpack, table.concat
local gsub, find, topattern, format = string.gsub, string.find, string.topattern, string.format
local lpegmatch = lpeg.match
-local texattribute = tex.attribute
+local texattribute, texsprint, ctxcatcodes = tex.attribute, tex.sprint, tex.ctxcatcodes
+local allocate = utilities.storage.allocate
local trace_tags = false trackers.register("structures.tags", function(v) trace_tags = v end)
@@ -20,30 +21,147 @@ local report_tags = logs.new("tags")
local attributes, structures = attributes, structures
local a_tagged = attributes.private('tagged')
-local a_image = attributes.private('image')
local unsetvalue = attributes.unsetvalue
local codeinjections = backends.codeinjections
-local taglist, labels, stack, chain, ids, enabled = { }, { }, { }, { }, { }, false -- no grouping assumed
+local taglist = allocate()
+local properties = allocate()
+local labels = allocate()
+local stack = { }
+local chain = { }
+local ids = { }
+local enabled = false
-structures.tags = structures.tags or { }
-local tags = structures.tags
-tags.taglist = taglist -- can best be hidden
+local tags = structures.tags
+tags.taglist = taglist -- can best be hidden
+tags.labels = labels
-function tags.start(tag,label,detail)
- labels[tag] = label ~= "" and label or tag
- if detail and detail ~= "" then
- tag = tag .. ":" .. detail
+local properties = allocate {
+
+ document = { pdf = "Div", nature = "display" },
+
+ division = { pdf = "Div", nature = "display" },
+ paragraph = { pdf = "P", nature = "mixed" },
+ construct = { pdf = "Span", nature = "inline" },
+
+ section = { pdf = "Sect", nature = "display" },
+ sectiontitle = { pdf = "H", nature = "mixed" },
+ sectionnumber = { pdf = "H", nature = "mixed" },
+ sectioncontent = { pdf = "Div", nature = "display" },
+
+ itemgroup = { pdf = "L", nature = "display" },
+ item = { pdf = "Li", nature = "display" },
+ itemtag = { pdf = "Lbl", nature = "mixed" },
+ itemcontent = { pdf = "LBody", nature = "mixed" },
+
+ description = { pdf = "Li", nature = "display" },
+ descriptiontag = { pdf = "Lbl", nature = "mixed" },
+ descriptioncontent = { pdf = "LBody", nature = "mixed" },
+ descriptionsymbol = { pdf = "Span", nature = "inline" }, -- note reference
+
+ verbatimblock = { pdf = "Code", nature = "display" },
+ verbatimline = { pdf = "Code", nature = "display" },
+ verbatim = { pdf = "Code", nature = "inline" },
+
+ register = { pdf = "Div", nature = "display" },
+ registersection = { pdf = "Div", nature = "display" },
+ registertag = { pdf = "Span", nature = "mixed" },
+ registerentries = { pdf = "Div", nature = "display" },
+ registerentry = { pdf = "Span", nature = "mixed" },
+ registersee = { pdf = "Span", nature = "mixed" },
+ registerpages = { pdf = "Span", nature = "mixed" },
+ registerpage = { pdf = "Span", nature = "inline" },
+
+ table = { pdf = "Table", nature = "display" },
+ tablerow = { pdf = "TR", nature = "display" },
+ tablecell = { pdf = "TD", nature = "mixed" },
+ tabulate = { pdf = "Table", nature = "display" },
+ tabulaterow = { pdf = "TR", nature = "display" },
+ tabulatecell = { pdf = "TD", nature = "mixed" },
+
+ list = { pdf = "TOC", nature = "display" },
+ listitem = { pdf = "TOCI", nature = "display" },
+ listtag = { pdf = "Lbl", nature = "mixed" },
+ listcontent = { pdf = "P", nature = "mixed" },
+ listdata = { pdf = "P", nature = "mixed" },
+ listpage = { pdf = "Reference", nature = "mixed" },
+
+ delimitedblock = { pdf = "BlockQuote", nature = "display" },
+ delimited = { pdf = "Quote", nature = "inline" },
+ subsentence = { pdf = "Span", nature = "inline" },
+
+ float = { pdf = "Div", nature = "display" },
+ floatcaption = { pdf = "Caption", nature = "display" },
+ floattag = { pdf = "Span", nature = "mixed" },
+ floattext = { pdf = "Span", nature = "mixed" },
+ floatcontent = { pdf = "P", nature = "mixed" },
+
+ image = { pdf = "P", nature = "mixed" },
+ mpgraphic = { pdf = "P", nature = "mixed" },
+
+ formulaset = { pdf = "Div", nature = "display" },
+ formula = { pdf = "Div", nature = "display" },
+ formulatag = { pdf = "Span", nature = "mixed" },
+ formulacontent = { pdf = "P", nature = "display" },
+ subformula = { pdf = "Div", nature = "display" },
+
+ link = { pdf = "Link", nature = "inline" },
+
+ margintextblock = { pdf = "Span", nature = "inline" },
+ margintext = { pdf = "Span", nature = "inline" },
+
+ math = { pdf = "Div", nature = "display" },
+ mn = { pdf = "Span", nature = "mixed" },
+ mi = { pdf = "Span", nature = "mixed" },
+ mo = { pdf = "Span", nature = "mixed" },
+ ms = { pdf = "Span", nature = "mixed" },
+ mrow = { pdf = "Span", nature = "display" },
+ msubsup = { pdf = "Span", nature = "display" },
+ msub = { pdf = "Span", nature = "display" },
+ msup = { pdf = "Span", nature = "display" },
+ merror = { pdf = "Span", nature = "mixed" },
+ munderover = { pdf = "Span", nature = "display" },
+ munder = { pdf = "Span", nature = "display" },
+ mover = { pdf = "Span", nature = "display" },
+ mtext = { pdf = "Span", nature = "mixed" },
+ mfrac = { pdf = "Span", nature = "display" },
+ mroot = { pdf = "Span", nature = "display" },
+ msqrt = { pdf = "Span", nature = "display" },
+
+}
+
+tags.properties = properties
+
+function tags.settagproperty(tag,key,value)
+ local p = properties[tag]
+ if p then
+ p[key] = value
+ else
+ properties[tag] = { [key] = value }
end
+end
+
+local lasttags = { }
+
+function tags.start(tag,label,detail)
if not enabled then
- codeinjections.enabletags(taglist,labels)
+ codeinjections.enabletags()
enabled = true
end
- local n = (ids[tag] or 0) + 1
- ids[tag] = n
- chain[#chain+1] = tag .. "-" .. n -- insert(chain,tag .. ":" .. n)
+ labels[tag] = label ~= "" and label or tag
+ local fulltag
+ if detail and detail ~= "" then
+ fulltag = tag .. ":" .. detail
+ else
+ fulltag = tag
+ end
local t = #taglist + 1
+ local n = (ids[fulltag] or 0) + 1
+ ids[fulltag] = n
+ lasttags[tag] = n
+--~ print("SETTING",tag,n)
+ chain[#chain+1] = fulltag .. "-" .. n -- insert(chain,tag .. ":" .. n)
stack[#stack+1] = t -- insert(stack,t)
taglist[t] = { unpack(chain) } -- we can add key values for alt and actualtext if needed
texattribute[a_tagged] = t
@@ -64,6 +182,10 @@ function tags.stop()
return t
end
+function tags.last(tag)
+ return lasttags[tag] -- or false
+end
+
function structures.atlocation(str)
local location = gsub(concat(taglist[texattribute[a_tagged]],"-"),"%-%d+","")
return find(location,topattern(str)) ~= nil
@@ -83,7 +205,7 @@ end)
directives.register("backend.addtags", function(v)
if not enabled then
- codeinjections.enabletags(taglist,labels)
+ codeinjections.enabletags()
enabled = true
end
end)
diff --git a/tex/context/base/strc-tag.mkiv b/tex/context/base/strc-tag.mkiv
index 05893c4c7..cdf4fe9b7 100644
--- a/tex/context/base/strc-tag.mkiv
+++ b/tex/context/base/strc-tag.mkiv
@@ -27,10 +27,10 @@
\def\t!paragraph {paragraph} % P
\def\t!construct {construct} % Span
-\def\t!structure {structure} % Sect
-\def\t!structuretitle {structuretitle} % H
-\def\t!structurenumber {structurenumber} % H
-\def\t!structurecontent {structurecontent} % Div
+\def\t!section {section} % Sect
+\def\t!sectiontitle {sectiontitle} % H
+\def\t!sectionnumber {sectionnumber} % H
+\def\t!sectioncontent {sectioncontent} % Div
\def\t!itemgroup {itemgroup} % L
\def\t!item {item} % Li
@@ -40,8 +40,10 @@
\def\t!description {description} % Li
\def\t!descriptiontag {descriptiontag} % Lbl
\def\t!descriptioncontent{descriptioncontent} % LBody
+\def\t!descriptionsymbol {descriptionsymbol} % Span
\def\t!verbatimblock {verbatimblock} % Code
+\def\t!verbatimline {verbatimline} % Code
\def\t!verbatim {verbatim} % Code
\def\t!register {register} % Div
@@ -54,11 +56,11 @@
\def\t!registerpage {registerpage} % Span
\def\t!table {table} % Table
-\def\t!tablerow {tr} % TR
-\def\t!tablecell {td} % TD
+\def\t!tablerow {tablerow} % TR
+\def\t!tablecell {tablecell} % TD
\def\t!tabulate {tabulate} % Table
-\def\t!tabulaterow {row} % TR
-\def\t!tabulatecell {cell} % TD
+\def\t!tabulaterow {tabulaterow} % TR
+\def\t!tabulatecell {tabulatecell} % TD
\def\t!list {list} % TOC
\def\t!listitem {listitem} % TOCI
@@ -89,6 +91,9 @@
\def\t!link {link} % Link
+\def\t!margintext {margintext} % Span
+\def\t!margintextblock {margintextblock} % Div
+
% \setuptaglabeltext
% [en]
% [\t!document=document]
@@ -98,8 +103,11 @@
\definesystemattribute[tagged] \chardef\taggedattribute \dogetattributeid{tagged}
\definesystemattribute[image] \chardef\imageattribute \dogetattributeid{image}
-% \def\mapelementtobackendtag {\dodoubleargument\domapelementtobackendtag}
-% \def\domapelementtobackendtag[#1][#2]{\ctxlua{backends.codeinjections.maptag("#1","#2")}}
+\def\setelementbackendtag{\dodoubleargument\dosetelementbackendtag}
+\def\setelementnature {\dodoubleargument\dosetelementnature}
+
+\def\dosetelementbackendtag[#1][#2]{\ctxlua{structures.tags.settagproperty("#1","backend","#2")}}
+\def\dosetelementnature [#1][#2]{\ctxlua{structures.tags.settagproperty("#1","nature", "#2")}}
% todo: indirect macro for trialtypesetting
diff --git a/tex/context/base/strc-xml.mkiv b/tex/context/base/strc-xml.mkiv
index 7a0099585..9696c2e06 100644
--- a/tex/context/base/strc-xml.mkiv
+++ b/tex/context/base/strc-xml.mkiv
@@ -13,12 +13,14 @@
\writestatus{loading}{ConTeXt Structure Macros / XML Processing}
+% This might disappear.
+
\unprotect
\startxmlsetups xml:ctx:internal:setup
- \xmlsetsetup{\xmldocument}{ctx:genericentry} {xml:ctx:genericentry}
- \xmlsetsetup{\xmldocument}{ctx:sectionentry} {xml:ctx:sectionentry}
- \xmlsetsetup{\xmldocument}{ctx:registerentry}{xml:ctx:registerentry}
+ \xmlsetsetup{#1}{ctx:genericentry} {xml:ctx:genericentry}
+ \xmlsetsetup{#1}{ctx:sectionentry} {xml:ctx:sectionentry}
+ \xmlsetsetup{#1}{ctx:registerentry}{xml:ctx:registerentry}
\stopxmlsetups
\xmlregistersetup{xml:ctx:internal:setup}
diff --git a/tex/context/base/supp-box.tex b/tex/context/base/supp-box.tex
index 4c398973e..dd83d76d4 100644
--- a/tex/context/base/supp-box.tex
+++ b/tex/context/base/supp-box.tex
@@ -2920,7 +2920,7 @@
\afterassignment\next\scratchdimen=#2}
\def\raisebox{\doraiselowerbox\raise}
-\def\lowerbox{\doraiselowerbox\raise}
+\def\lowerbox{\doraiselowerbox\lower}
% maybe some day we need this
%
diff --git a/tex/context/base/supp-fil.lua b/tex/context/base/supp-fil.lua
index 83cef81a1..7ab86a51d 100644
--- a/tex/context/base/supp-fil.lua
+++ b/tex/context/base/supp-fil.lua
@@ -121,7 +121,7 @@ local function readfilename(specification,backtrack,treetoo)
end
end
if not fnd and treetoo then
---~ fnd = resolvers.find_file(filename)
+--~ fnd = resolvers.findfile(filename)
fnd = resolvers.findtexfile(filename)
end
found[specification] = fnd
diff --git a/tex/context/base/supp-fil.mkiv b/tex/context/base/supp-fil.mkiv
index 6f89735e3..5b425df0c 100644
--- a/tex/context/base/supp-fil.mkiv
+++ b/tex/context/base/supp-fil.mkiv
@@ -318,10 +318,14 @@
\readfile{#1}{#2}{#3}%
\popcatcodetable}
-\def\readxmlfile#1#2#3%
- {\pushcatcodetable \catcodetable \xmlcatcodes
- \readfile{#1}{#2}{#3}%
- \popcatcodetable}
+\ifdefined \xmlcatcodes
+
+ \def\readxmlfile#1#2#3%
+ {\pushcatcodetable \catcodetable \xmlcatcodes
+ \readfile{#1}{#2}{#3}%
+ \popcatcodetable}
+
+\fi
%D \macros
%D {doiffileelse,doiflocfileelse}
diff --git a/tex/context/base/supp-mat.mkiv b/tex/context/base/supp-mat.mkiv
index 4aebddda9..02da286c3 100644
--- a/tex/context/base/supp-mat.mkiv
+++ b/tex/context/base/supp-mat.mkiv
@@ -50,6 +50,8 @@
\long\def\displaymath#1{\noindent \ifmmode#1\else\Ustartdisplaymath#1\Ustopdisplaymath\fi}
\long\def\inlinemath #1{\dontleavehmode\ifmmode#1\else\Ustartmath #1\Ustopmath \fi}
+\let\m\mathematics % unexpanded?
+
\let\stopmathmode\relax
\unexpanded\def\startmathmode % cannot be used nested
diff --git a/tex/context/base/syst-con.lua b/tex/context/base/syst-con.lua
index 638f3c797..2fb2ee8a2 100644
--- a/tex/context/base/syst-con.lua
+++ b/tex/context/base/syst-con.lua
@@ -13,6 +13,7 @@ converters = converters or { }
the top of <l n='luatex'/>'s char range but outside the unicode range.</p>
--ldx]]--
+local tonumber = tonumber
local char, texsprint, format = unicode.utf8.char, tex.sprint, string.format
function converters.hexstringtonumber(n) texsprint(tonumber(n,16)) end
diff --git a/tex/context/base/tabl-ltb.mkiv b/tex/context/base/tabl-ltb.mkiv
index a1f92cedc..34d93e5a9 100644
--- a/tex/context/base/tabl-ltb.mkiv
+++ b/tex/context/base/tabl-ltb.mkiv
@@ -814,17 +814,6 @@
\startlinetableanalysis\readfile{#1}\donothing\donothing\stoplinetableanalysis
\startlinetablerun \readfile{#1}\donothing\donothing\stoplinetablerun
\egroup}
-
-% will go away:
-
-\def\processlinetableXMLfile#1%
- {\bgroup
- \let\startlinetable\donothing
- \let\stoplinetable \donothing
- \startlinetableanalysis\processXMLfile{#1}\stoplinetableanalysis
- \startlinetablerun \processXMLfile{#1}\stoplinetablerun
- \egroup}
-
\protect \endinput
\doifnotmode{demo}{\endinput}
diff --git a/tex/context/base/tabl-ntb.mkiv b/tex/context/base/tabl-ntb.mkiv
index 3d5a90cd2..79835d92e 100644
--- a/tex/context/base/tabl-ntb.mkiv
+++ b/tex/context/base/tabl-ntb.mkiv
@@ -99,8 +99,11 @@
%D
%D \typebuffer \getbuffer
+\let\dotagTABLEalign\relax
+
\def\bTBLCELL % why not \doinhibitblank
{\inhibitblank
+ \dotagTABLEalign
\doconvertfont\tbltblstyle\empty
\everypar{\tbltblleft\delayedbegstrut}}
@@ -913,6 +916,8 @@
% \def\endtbl
% {\egroup}
+\let\dotagTABLEcell\relax
+
\def\begintbl
{\global\tblspn\zerocount
\global\tblcol\zerocount
@@ -923,8 +928,9 @@
\dostarttagged\t!tablerow\empty
\appendtoks\dostoptagged\dostarttagged\t!tablerow\empty\to\everycr
\halign\bgroup
- \registerparoptions % new
- \ignorespaces##\unskip&&\dostarttagged\t!tablecell\empty\ignorespaces##\unskip\dostoptagged\cr}
+ \registerparoptions
+ % watch out: tagging the cell happens at the outer level (faster)
+ \ignorespaces##\unskip&&\dostarttagged\t!tablecell\empty\ignorespaces##\unskip\dostoptagged\cr} % one too many
\def\endtbl
{\dostoptagged\egroup
@@ -977,11 +983,16 @@
\settblhei{#1}{\the\ht\scratchbox}%
\fi}%
+\let\dotagnoftablecolumns\relax
+\let\dotagnoftablerows \relax
+
\def\domakeTBLthree#1 #2 %
{% height
\!!counta \gettblcol{#1}{#2}\relax
\!!countb \gettblrow{#1}{#2}\relax
\!!heighta\gettblht {#1}{#2}\relax
+\dotagnoftablecolumns
+\dotagnoftablerows
\scratchdimen\zeropoint
\ifnum\!!counta=\maximumcol\relax
% case: nc=maxcolumns
@@ -1020,7 +1031,7 @@
\else
\normalexpanded{\noexpand\directgotobox{\box\scratchbox}[\!!stringa]}% to be checked
\fi
- \box\scratchbox}
+ \dotagTABLEcell} % right spot
\def\inTBLcell#1#2% hm, do we need #1 #2 ? we use tblcol anyway
{\ExpandBothAfter\doifinsetelse\localwidth{\v!fit,\v!broad} % user set
diff --git a/tex/context/base/tabl-tbl.mkiv b/tex/context/base/tabl-tbl.mkiv
index 9e0479283..6af82c740 100644
--- a/tex/context/base/tabl-tbl.mkiv
+++ b/tex/context/base/tabl-tbl.mkiv
@@ -326,6 +326,8 @@
\let\endreshapedtabulatepar\egroup
+\let\dotagtabulatealign\relax
+
\def\dodosettabulatepreamble#1#2% only makes sense for many tabulates
{\normalexpanded{\!!toksa{\the\!!toksa
&\hskip\pretabskip\noexpand\pretabrule##&%
@@ -349,6 +351,7 @@
\noexpand\bbskip
\bgroup % we cannot combine the if because a cell may have only one ##
\noexpand\dostarttagged\noexpand\t!tabulatecell\noexpand\empty
+\dotagtabulatealign
\noexpand#1%
\ifcase\tabulatereshape\else
\beginreshapedtabulatepar
@@ -1108,6 +1111,7 @@
\def\tabulatebskiptwo
{\ifvoid\tablebox\tabulatecolumn
+ % first line
\global\setbox\tablebox\tabulatecolumn\vbox
\bgroup
\glet\tabulatehook\notabulatehook
@@ -1117,6 +1121,7 @@
\ignorespaces
\let\eskip\tabulatebskiptwoeskip
\else
+ % successive lines
\let\eskip\empty
\dontcomplain
\glet\tabulatehook\dotabulatehook
@@ -1248,6 +1253,8 @@
\def\tabulateautorule{\doHR\plusone}%
\def\tabulateautoline{\doHL\plusone}%
+\newtoks\everytabulatepar
+
\bgroup \catcode`\|=\@@other
\gdef\processtabulate[|#1|]% in the process of optimizing
@@ -1325,6 +1332,7 @@
\fi
\dontcomplain
\forgetall % hm, interference with \forgetparindent ^^^ probably bug, to be solved
+ \everypar\everytabulatepar
\setbox0\vbox % outside \if because of line counting
{\notesenabledfalse
\let\tabulateindent\!!zeropoint
diff --git a/tex/context/base/task-ini.lua b/tex/context/base/task-ini.lua
index 1bbad4f9f..0f1af0c5a 100644
--- a/tex/context/base/task-ini.lua
+++ b/tex/context/base/task-ini.lua
@@ -26,7 +26,7 @@ tasks.appendaction("processors", "words", "languages.words.check")
tasks.appendaction("processors", "fonts", "builders.paragraphs.solutions.splitters.split") -- experimental
tasks.appendaction("processors", "fonts", "nodes.handlers.characters") -- maybe todo
-tasks.appendaction("processors", "fonts", "nodes.handlers.injectkerns") -- maybe todo
+tasks.appendaction("processors", "fonts", "nodes.injections.handler") -- maybe todo
tasks.appendaction("processors", "fonts", "nodes.handlers.protectglyphs", nil, "nohead") -- maybe todo
tasks.appendaction("processors", "fonts", "builders.kernel.ligaturing") -- always on
tasks.appendaction("processors", "fonts", "builders.kernel.kerning") -- always on
@@ -68,9 +68,9 @@ tasks.appendaction("finalizers", "fonts", "builders.paragraphs.solutions
-- rather new
tasks.appendaction("mvlbuilders", "normalizers", "nodes.handlers.migrate") --
-tasks.appendaction("mvlbuilders", "normalizers", "nodes.handlers.pagespacing") -- last !
+tasks.appendaction("mvlbuilders", "normalizers", "builders.vspacing.pagehandler") -- last !
-tasks.appendaction("vboxbuilders", "normalizers", "nodes.handlers.vboxspacing") --
+tasks.appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler") --
-- speedup: only kick in when used
diff --git a/tex/context/base/toks-ini.lua b/tex/context/base/toks-ini.lua
index 67da7fea6..cb2807b24 100644
--- a/tex/context/base/toks-ini.lua
+++ b/tex/context/base/toks-ini.lua
@@ -135,7 +135,7 @@ function collectors.show(tag, method)
end
end
-function collectors.default_words(t,str)
+function collectors.defaultwords(t,str)
t[#t+1] = tokens.bgroup
t[#t+1] = createtoken("red")
for i=1,#str do
@@ -144,9 +144,9 @@ function collectors.default_words(t,str)
t[#t+1] = tokens.egroup
end
-function collectors.with_words(tag,handle)
+function collectors.dowithwords(tag,handle)
local t, w = { }, { }
- handle = handle or collectors.default_words
+ handle = handle or collectors.defaultwords
local tagdata = collectordata[tag]
for k=1,#tagdata do
local v = tagdata[k]
@@ -166,7 +166,7 @@ function collectors.with_words(tag,handle)
collectordata[tag] = t
end
-function collectors.show_token(t)
+local function showtoken(t)
if t then
local cmd, chr, id, cs, name = t[1], t[2], t[3], nil, command_name(t) or ""
if cmd == letter or cmd == other then
@@ -188,9 +188,11 @@ function collectors.show_token(t)
end
end
+collectors.showtoken = showtoken
+
function collectors.trace()
local t = get_next()
- texio.write_nl(collectors.show_token(t))
+ texio.write_nl(showtoken(t))
return t
end
diff --git a/tex/context/base/trac-deb.lua b/tex/context/base/trac-deb.lua
index 7e8f089a0..87355c61a 100644
--- a/tex/context/base/trac-deb.lua
+++ b/tex/context/base/trac-deb.lua
@@ -161,10 +161,8 @@ end)
lmx = lmx or { }
-if not lmx.variables then lmx.variables = { } end
-
lmx.htmfile = function(name) return environment.jobname .. "-status.html" end
-lmx.lmxfile = function(name) return resolvers.find_file(name,'tex') end
+lmx.lmxfile = function(name) return resolvers.findfile(name,'tex') end
function lmx.showdebuginfo(lmxname)
local variables = {
diff --git a/tex/context/base/trac-lmx.lua b/tex/context/base/trac-lmx.lua
index 47bca3b1e..b4bfc36d6 100644
--- a/tex/context/base/trac-lmx.lua
+++ b/tex/context/base/trac-lmx.lua
@@ -10,13 +10,15 @@ if not modules then modules = { } end modules ['trac-lmx'] = {
local gsub, format, concat, byte = string.gsub, string.format, table.concat, string.byte
+local allocate = utilities.storage.allocate
+
lmx = lmx or { }
local lmx = lmx
-lmx.variables = lmx.variables or { } -- global, shared
+lmx.variables = allocate()
local lmxvariables = lmx.variables
-local escapes = {
+local escapes = allocate {
['&'] = '&amp;',
['<'] = '&lt;',
['>'] = '&gt;',
@@ -76,7 +78,7 @@ local function do_variable(str)
end
function lmx.loadedfile(name)
- name = (resolvers and resolvers.find_file and resolvers.find_file(name)) or name
+ name = (resolvers and resolvers.findfile and resolvers.findfile(name)) or name
return io.loaddata(name)
end
diff --git a/tex/context/base/trac-set.lua b/tex/context/base/trac-set.lua
index 4cb1b65d1..3b99b489c 100644
--- a/tex/context/base/trac-set.lua
+++ b/tex/context/base/trac-set.lua
@@ -11,6 +11,7 @@ local concat = table.concat
local format, find, lower, gsub, partialescapedpattern = string.format, string.find, string.lower, string.gsub, string.partialescapedpattern
local is_boolean = string.is_boolean
local settings_to_hash = utilities.parsers.settings_to_hash
+local allocate = utilities.storage.allocate
utilities = utilities or { }
local utilities = utilities
@@ -205,7 +206,7 @@ local enable, disable, register, list, show = setters.enable, setters.disable, s
function setters.new(name)
local t -- we need to access it in t
t = {
- data = { }, -- indexed, but also default and value fields
+ data = allocate(), -- indexed, but also default and value fields
name = name,
enable = function(...) enable (t,...) end,
disable = function(...) disable (t,...) end,
diff --git a/tex/context/base/trac-tex.lua b/tex/context/base/trac-tex.lua
index dbd35b56e..ab9e73e6e 100644
--- a/tex/context/base/trac-tex.lua
+++ b/tex/context/base/trac-tex.lua
@@ -14,7 +14,7 @@ local trackers = trackers
local saved = { }
-function trackers.save_hash()
+function trackers.savehash()
saved = texhashtokens()
end
diff --git a/tex/context/base/typo-dig.lua b/tex/context/base/typo-dig.lua
index 36954a233..840ef54d8 100644
--- a/tex/context/base/typo-dig.lua
+++ b/tex/context/base/typo-dig.lua
@@ -41,6 +41,7 @@ local fontdata = fonts.identifiers
local chardata = fonts.characters
local quaddata = fonts.quads
local charbase = characters.data
+local getdigitwidth = fonts.getdigitwidth
typesetters = typesetters or { }
local typesetters = typesetters
@@ -86,7 +87,7 @@ actions[1] = function(head,start,attribute,attr)
local unic = chardata[font][char].tounicode
local what = unic and tonumber(unic,16) or char
if charbase[what].category == "nd" then
- local oldwidth, newwidth = start.width, fonts.get_digit_width(font)
+ local oldwidth, newwidth = start.width, getdigitwidth(font)
if newwidth ~= oldwidth then
if trace_digits then
report_digits("digit trigger %s, instance %s, char 0x%05X, unicode 0x%05X, delta %s",
diff --git a/tex/context/base/util-lua.lua b/tex/context/base/util-lua.lua
index 0174f667f..8562c6417 100644
--- a/tex/context/base/util-lua.lua
+++ b/tex/context/base/util-lua.lua
@@ -7,7 +7,7 @@ if not modules then modules = { } end modules ['util-lua'] = {
}
utilities = utilities or {}
-utilities.lua = utilities.merger or { }
+utilities.lua = utilities.lua or { }
utilities.report = utilities.report or print
function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
diff --git a/tex/context/base/util-sto.lua b/tex/context/base/util-sto.lua
new file mode 100644
index 000000000..49abd8c82
--- /dev/null
+++ b/tex/context/base/util-sto.lua
@@ -0,0 +1,90 @@
+if not modules then modules = { } end modules ['util-sto'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local setmetatable, getmetatable = setmetatable, getmetatable
+
+utilities = utilities or { }
+utilities.storage = utilities.storage or { }
+local storage = utilities.storage
+
+function storage.mark(t)
+ if not t then
+ texio.write_nl("fatal error: storage '%s' cannot be marked",t)
+ os.exit()
+ end
+ local m = getmetatable(t)
+ if not m then
+ m = { }
+ setmetatable(t,m)
+ end
+ m.__storage__ = true
+ return t
+end
+
+function storage.allocate(t)
+ t = t or { }
+ local m = getmetatable(t)
+ if not m then
+ m = { }
+ setmetatable(t,m)
+ end
+ m.__storage__ = true
+ return t
+end
+
+function storage.marked(t)
+ local m = getmetatable(t)
+ return m and m.__storage__
+end
+
+function storage.checked(t)
+ if not t then
+ texio.write_nl("fatal error: storage '%s' has not been allocated",t)
+ os.exit()
+ end
+ return t
+end
+
+function setmetatablekey(t,key,value)
+ local m = getmetatable(t)
+ if not m then
+ m = { }
+ setmetatable(t,m)
+ end
+ m[key] = value
+end
+
+function getmetatablekey(t,key,value)
+ local m = getmetatable(t)
+ return m and m[key]
+end
+
+--~ function utilities.storage.delay(parent,name,filename)
+--~ local m = getmetatable(parent)
+--~ m.__list[name] = filename
+--~ end
+--~
+--~ function utilities.storage.predefine(parent)
+--~ local list = { }
+--~ local m = getmetatable(parent) or {
+--~ __list = list,
+--~ __index = function(t,k)
+--~ local l = require(list[k])
+--~ t[k] = l
+--~ return l
+--~ end
+--~ }
+--~ setmetatable(parent,m)
+--~ end
+--~
+--~ bla = { }
+--~ utilities.storage.predefine(bla)
+--~ utilities.storage.delay(bla,"test","oepsoeps")
+--~ local t = bla.test
+--~ table.print(t)
+--~ print(t.a)
diff --git a/tex/context/base/x-calcmath.mkiv b/tex/context/base/x-calcmath.mkiv
index f271a7849..0b5793a96 100644
--- a/tex/context/base/x-calcmath.mkiv
+++ b/tex/context/base/x-calcmath.mkiv
@@ -76,12 +76,3 @@
% \calcmath{D(x+D(y))}
% \calcmath{Df(x)}
% \calcmath{g(x)}
-
-\calcmath{sqrt(sin^2(x)+cos^2(x))}
-
-\startXMLdata
-test <icm>sqrt(sin^2(x)+cos^2(x))</icm> test
-test <dcm>sqrt(sin^2(x)+cos^2(x))</dcm> test
-\stopXMLdata
-
-\stoptext
diff --git a/tex/context/base/x-chemml.mkiv b/tex/context/base/x-chemml.mkiv
index f9ed32960..658f3a452 100644
--- a/tex/context/base/x-chemml.mkiv
+++ b/tex/context/base/x-chemml.mkiv
@@ -230,26 +230,26 @@
\stopxmlsetups
\unexpanded\def\doCMLannotation#1% #2#3% loc caption text
- {\XMLval{cml:a:l}{#1}{\chemicalright}}% {#2}{#3}}
-
-\mapXMLvalue {cml:a:l} {t} {\chemicaltop}
-\mapXMLvalue {cml:a:l} {b} {\chemicalbottom}
-\mapXMLvalue {cml:a:l} {l} {\chemicalleft}
-\mapXMLvalue {cml:a:l} {r} {\chemicalright}
-\mapXMLvalue {cml:a:l} {lc} {\chemicalleftcentered} % \mapXMLvalue {cml:a:l} {cl} {\chemicalleftcentered}
-\mapXMLvalue {cml:a:l} {rc} {\chemicalrightcentered} % \mapXMLvalue {cml:a:l} {cr} {\chemicalrightcentered}
-\mapXMLvalue {cml:a:l} {tl} {\chemicaltopleft} % \mapXMLvalue {cml:a:l} {lt} {\chemicaltopleft}
-\mapXMLvalue {cml:a:l} {bl} {\chemicalbottomleft} % \mapXMLvalue {cml:a:l} {lb} {\chemicalbottomleft}
-\mapXMLvalue {cml:a:l} {tr} {\chemicaltopright} % \mapXMLvalue {cml:a:l} {rt} {\chemicaltopright}
-\mapXMLvalue {cml:a:l} {br} {\chemicalbottomright} % \mapXMLvalue {cml:a:l} {rb} {\chemicalbottomright}
-\mapXMLvalue {cml:a:l} {lt} {\chemicallefttop} % \mapXMLvalue {cml:a:l} {tl} {\chemicallefttop}
-\mapXMLvalue {cml:a:l} {lb} {\chemicalleftbottom} % \mapXMLvalue {cml:a:l} {bl} {\chemicalleftbottom}
-\mapXMLvalue {cml:a:l} {rt} {\chemicalrighttop} % \mapXMLvalue {cml:a:l} {tr} {\chemicalrighttop}
-\mapXMLvalue {cml:a:l} {rb} {\chemicalrightbottom} % \mapXMLvalue {cml:a:l} {br} {\chemicalrightbottom}
-\mapXMLvalue {cml:a:l} {x} {\chemicaltighttext}
-\mapXMLvalue {cml:a:l} {sl} {\chemicalsmashedleft} % \mapXMLvalue {cml:a:l} {ls} {\chemicalsmashedleft}
-\mapXMLvalue {cml:a:l} {sm} {\chemicalsmashedmiddle} % \mapXMLvalue {cml:a:l} {ms} {\chemicalsmashedmiddle}
-\mapXMLvalue {cml:a:l} {sr} {\chemicalsmashedright} % \mapXMLvalue {cml:a:l} {rs} {\chemicalsmashedright}
+ {\xmlval{cml:a:l}{#1}{\chemicalright}}% {#2}{#3}}
+
+\xmlmapvalue {cml:a:l} {t} {\chemicaltop}
+\xmlmapvalue {cml:a:l} {b} {\chemicalbottom}
+\xmlmapvalue {cml:a:l} {l} {\chemicalleft}
+\xmlmapvalue {cml:a:l} {r} {\chemicalright}
+\xmlmapvalue {cml:a:l} {lc} {\chemicalleftcentered} % \xmlmapvalue {cml:a:l} {cl} {\chemicalleftcentered}
+\xmlmapvalue {cml:a:l} {rc} {\chemicalrightcentered} % \xmlmapvalue {cml:a:l} {cr} {\chemicalrightcentered}
+\xmlmapvalue {cml:a:l} {tl} {\chemicaltopleft} % \xmlmapvalue {cml:a:l} {lt} {\chemicaltopleft}
+\xmlmapvalue {cml:a:l} {bl} {\chemicalbottomleft} % \xmlmapvalue {cml:a:l} {lb} {\chemicalbottomleft}
+\xmlmapvalue {cml:a:l} {tr} {\chemicaltopright} % \xmlmapvalue {cml:a:l} {rt} {\chemicaltopright}
+\xmlmapvalue {cml:a:l} {br} {\chemicalbottomright} % \xmlmapvalue {cml:a:l} {rb} {\chemicalbottomright}
+\xmlmapvalue {cml:a:l} {lt} {\chemicallefttop} % \xmlmapvalue {cml:a:l} {tl} {\chemicallefttop}
+\xmlmapvalue {cml:a:l} {lb} {\chemicalleftbottom} % \xmlmapvalue {cml:a:l} {bl} {\chemicalleftbottom}
+\xmlmapvalue {cml:a:l} {rt} {\chemicalrighttop} % \xmlmapvalue {cml:a:l} {tr} {\chemicalrighttop}
+\xmlmapvalue {cml:a:l} {rb} {\chemicalrightbottom} % \xmlmapvalue {cml:a:l} {br} {\chemicalrightbottom}
+\xmlmapvalue {cml:a:l} {x} {\chemicaltighttext}
+\xmlmapvalue {cml:a:l} {sl} {\chemicalsmashedleft} % \xmlmapvalue {cml:a:l} {ls} {\chemicalsmashedleft}
+\xmlmapvalue {cml:a:l} {sm} {\chemicalsmashedmiddle} % \xmlmapvalue {cml:a:l} {ms} {\chemicalsmashedmiddle}
+\xmlmapvalue {cml:a:l} {sr} {\chemicalsmashedright} % \xmlmapvalue {cml:a:l} {rs} {\chemicalsmashedright}
\startxmlsetups cml:forever
\left[\xmlflush{#1}\right]
diff --git a/tex/context/base/x-mathml.mkiv b/tex/context/base/x-mathml.mkiv
index 5e1046272..e5da5f832 100644
--- a/tex/context/base/x-mathml.mkiv
+++ b/tex/context/base/x-mathml.mkiv
@@ -327,7 +327,7 @@
\startxmlsetups mml:apply:mml:fn
\xmldoifelse {#1} {/mml:ci} {
- \edef\mmlfnci{\xmlcontent{#1}{/mml:ci}}% replaces \XMLfnoperator
+ \edef\mmlfnci{\xmlcontent{#1}{/mml:ci}}%
\doifsetupselse{mml:fn:\mmlfnci} { % was mmc:fn:...
\xmlsetup{#1}{mml:fn:\mmlfnci} % \MMLdoL/MMLdoR to be handled in plugin
} {
@@ -391,12 +391,12 @@
\let\MMCfnright\relax
\fi
\xmldoifelse {#1} {/mml:ci} { % first
- \edef\mmlfnci{\xmlcontent{#1}{/mml:ci}}% replaces \XMLfnoperator
+ \edef\mmlfnci{\xmlcontent{#1}{/mml:ci}}%
\doifsetupselse{mml:fn:\mmlfnci} { % was mmc:fn:...
\xmlsetup{#1}{mml:fn:\mmlfnci} % \MMLdoL/MMLdoR to be handled in plugin
} {
\MMLcreset
- \getXMLstackdata\plusone
+ \mmlfirst{#1}
}
} {
\xmldoifelse {#1} {/mml:apply} { % first
@@ -422,7 +422,7 @@
\startxmlsetups mmc:fn:apply % where used?
\xmldoifelse {#1} {/mml:ci} { % first
- \edef\mmlfnci{\xmlcontent{#1}{/mml:ci}}% replaces \XMLfnoperator
+ \edef\mmlfnci{\xmlcontent{#1}{/mml:ci}}%
\doifsetupselse{mml:fn:\mmlfnci} { % was mmc:fn:...
\xmlsetup{#1}{mml:fn:\mmlfnci} % \MMLdoL/MMLdoR to be handled in plugin
} {
@@ -475,22 +475,6 @@
% \startxmlsetups mml:csymbol:<url> \stopxmlsetups
%D Alternative b will convert periods into comma's:
-%D
-%D \startbuffer
-%D \startXMLdata
-%D <imath><apply><cn>1.23</cn></apply></imath>
-%D <dmath><apply><cn>1.23</cn></apply></dmath>
-%D \stopXMLdata
-%D
-%D \type{\setupMMLappearance[cn] [alternative=b]}
-%D
-%D \startXMLdata
-%D <imath><apply><cn>1.23</cn></apply></imath>
-%D <dmath><apply><cn>1.23</cn></apply></dmath>
-%D \stopXMLdata
-%D \stopbuffer
-%D
-%D \start \typebuffer \getbuffer \stop
\setupMMLappearance[cn] [\c!alternative=\v!a]
\setupMMLappearance[polar] [\c!alternative=\v!a] % a|b|c
@@ -599,13 +583,11 @@
\stopxmlsetups
\startxmlsetups mml:cn:polar:b
-% {\mr e}^{\xmlsnippet{#1}{1}\mskip2mu\getXMLentity{imaginaryi}}
{\mr e}^{\xmlsnippet{#1}{1}+\xmlsnippet{#1}{3}\thinspace{\mr i}}
\stopxmlsetups
\startxmlsetups mml:cn:polar:c
-% \exp\left(\xmlsnippet{#1}{1}\mskip2mu\getXMLentity{imaginaryi}\right)}
- \exp\left(\xmlsnippet{#1}{1}+\xmlsnippet{#1}{3}\thinspace\getXMLentity{imaginaryi}\right)
+ \exp\left(\xmlsnippet{#1}{1}+\xmlsnippet{#1}{3}\thinspace{\mr i}\right)
\stopxmlsetups
\startxmlsetups mml:cn:complex-polar
@@ -1240,22 +1222,6 @@
\setupMMLappearance[diff][\c!location=\v!top,\c!alternative=\v!a]
-% \setupMMLappearance[diff][alternative=b]
-%
-% \startXMLdata
-% <math><apply><apply><diff/><bvar><ci>x</ci></bvar><ci>f</ci></apply><ci>a</ci></apply></math>
-% <math><apply><apply><diff/><bvar><ci>x</ci></bvar><degree>2</degree><ci>f</ci></apply><ci>a</ci></apply></math>
-% \stopXMLdata
-
-% d^y/dx^2
-%
-% \startXMLdata
-% <math><apply><diff/>
-% <bvar><ci>x</ci><cn type="integer">2</cn></bvar>
-% <lambda><bvar><ci>x</ci></bvar><ci>y</ci></lambda>
-% </apply></math>
-% \stopXMLdata
-
\startxmlsetups mml:diff
\MMLcreset
\doifelse \MMLdiffalternative \v!a {
@@ -1987,7 +1953,7 @@
\frac{\mmlfirst{#1}}{\mmlsecond{#1}}
}
\else
- \doifXMLvalelse {mml:mfrac:linethickness} \mmlfraclinethickness {
+ \doifelse {\xmlval{mml:mfrac:linethickness}{\mmlfraclinethickness}{}} {} {
\scratchdimen\xmlval{mml:mfrac:linethickness}\mmlfraclinethickness{.4pt}
} {
\setdimensionwithunit\scratchdimen\mmlfraclinethickness{pt}
@@ -2296,27 +2262,8 @@
\protect \endinput
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
% TODO:
-
-\startmoduletestsection
-
-\def\xflushXMLstackwith#1#2#3#4% num bgroup egroup whatever
- {\dostepwiserecurse{#1}\XMLstacklevel\plusone
- {#2\relax
- \ifnum\recurselevel>#1\relax#4\fi
- \getXMLstackdata\recurselevel
- #3}}
-
-\def\xflushXMLstackfrom#1#2#3%
- {\dostepwiserecurse{#1}\XMLstacklevel\plusone
- {#2\getXMLstackdata\recurselevel#3}}
-
+%
% <apply><divide/>
% <apply><minus/>
% <apply><minus/><ci>b</ci></apply>
@@ -2330,46 +2277,59 @@
% </apply>
% </apply>
-\startxmlsetups mml:minus
- \doif \MMLsignreduction \v!yes {
- \setMMLcreset{fn,\MMLcfunctionlist}
- }
- \ifcase\XMLstacklevel
- \or
- % self
- \or
- -\getXMLstackdata\plustwo
- \else
- \dostepwiserecurse \plustwo \XMLstacklevel \plusone {
- \begingroup
- \doifelse {\getXMLstackname\recurselevel} {apply} {
- \ifnum\recurselevel=\plustwo
- \begingroup
- \dodoifelseMMCfunctioninapply \recurselevel {minus} {
- \ifnum\XMLstacklevel>\plustwo
- \endgroup
- \else
- \endgroup
- \MMLcreset
- \fi
- } {
- \endgroup
- }
- \else
- \doifelseMMCfunctioninapply \recurselevel {\MMLcfunctionlist,\MMLcconstructlist} {
- \MMLcreset
- } {
- }
- \fi
- } {
- }
- \getXMLstackdata\recurselevel
- \ifnum\recurselevel<\XMLstacklevel\relax
- -
- \fi
- \endgroup
- }
- \fi
-\stopxmlsetups
-
-\stopmoduletestsection
+% \startmoduletestsection
+%
+% \def\xflushXMLstackwith#1#2#3#4% num bgroup egroup whatever
+% {\dostepwiserecurse{#1}\XMLstacklevel\plusone
+% {#2\relax
+% \ifnum\recurselevel>#1\relax#4\fi
+% \getXMLstackdata\recurselevel
+% #3}}
+%
+% \def\xflushXMLstackfrom#1#2#3%
+% {\dostepwiserecurse{#1}\XMLstacklevel\plusone
+% {#2\getXMLstackdata\recurselevel#3}}
+%
+% \startxmlsetups mml:minus
+% \doif \MMLsignreduction \v!yes {
+% \setMMLcreset{fn,\MMLcfunctionlist}
+% }
+% \ifcase\XMLstacklevel
+% \or
+% % self
+% \or
+% -\getXMLstackdata\plustwo
+% \else
+% \dostepwiserecurse \plustwo \XMLstacklevel \plusone {
+% \begingroup
+% \doifelse {\getXMLstackname\recurselevel} {apply} {
+% \ifnum\recurselevel=\plustwo
+% \begingroup
+% \dodoifelseMMCfunctioninapply \recurselevel {minus} {
+% \ifnum\XMLstacklevel>\plustwo
+% \endgroup
+% \else
+% \endgroup
+% \MMLcreset
+% \fi
+% } {
+% \endgroup
+% }
+% \else
+% \doifelseMMCfunctioninapply \recurselevel {\MMLcfunctionlist,\MMLcconstructlist} {
+% \MMLcreset
+% } {
+% }
+% \fi
+% } {
+% }
+% \getXMLstackdata\recurselevel
+% \ifnum\recurselevel<\XMLstacklevel\relax
+% -
+% \fi
+% \endgroup
+% }
+% \fi
+% \stopxmlsetups
+%
+% \stopmoduletestsection
diff --git a/tex/context/base/x-pending.mkiv b/tex/context/base/x-pending.mkiv
index 20fe5fb6a..fa4cd18de 100644
--- a/tex/context/base/x-pending.mkiv
+++ b/tex/context/base/x-pending.mkiv
@@ -17,7 +17,7 @@
%D \MKII\ code, when enabled.
\startluacode
- dofile(resolvers.find_file("lxml-inf.lua"))
+ dofile(resolvers.findfile("lxml-inf.lua"))
local list = { }
function document.check_pending_xml_element(str)
list[str] = (list[str] and (list[str]+1)) or 1
diff --git a/tex/context/base/x-xtag.mkiv b/tex/context/base/x-xtag.mkiv
index d7e2f2559..eaeb89f85 100644
--- a/tex/context/base/x-xtag.mkiv
+++ b/tex/context/base/x-xtag.mkiv
@@ -11,8 +11,12 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+%D Here we load the \MKII\ (mostly) streaming \XML\ parser. We
+%D define a couple of catcode regimes first.
+
\ifdefined\XMLbanner \endinput \fi
+\loadcorefile{catc-xml}
\loadcorefile{xtag-ini}
\loadcorefile{xtag-ext}
\loadcorefile{xtag-exp}
diff --git a/tex/context/base/xtag-ini.tex b/tex/context/base/xtag-ini.tex
index e48ef3da6..0c17d1dcc 100644
--- a/tex/context/base/xtag-ini.tex
+++ b/tex/context/base/xtag-ini.tex
@@ -16,6 +16,26 @@
%D Beware: don't rely on \longempty things, since this may
%D change!
+%D \macros
+%D {defineinputmode,setinputmode}
+%D
+%D For old times sake we still support:
+
+\unexpanded\def\defineinputmode[#1]{\expandafter\newtoks\csname every#1inputmode\endcsname}
+\unexpanded\def\setinputmode [#1]{\the\executeifdefined{every#1inputmode}\emptytoks}
+
+\defineinputmode [TEX]
+\defineinputmode [XML]
+
+\setinputmode [TEX]
+
+%D Finally we make sure that the compound handler keeps doing its job.
+
+\ifx\normalcompound\undefined \let\normalcompound=| \fi
+
+\appendtoks \catcode`|=\activecatcode \let|\normalcompound \to \everyTEXinputmode
+\appendtoks \catcode`|=\lettercatcode \to \everyXMLinputmode
+
%D This module is highly optimized for speed, which sometimes
%D leads to rather unreadable code. Sorry for this.
@@ -192,7 +212,7 @@
\newif\ifignoreXMLspaces
\newif\iffixedXMLfont
\newif\iftraceXMLelements
-%newif\ifprocessingXML
+\newif\ifprocessingXML
\newcount\XMLlevel % scratchcounter
\newcount\XMLdepth % used here
diff --git a/tex/context/interface/cont-cs.xml b/tex/context/interface/cont-cs.xml
index d19ba7c4f..5214a4254 100644
--- a/tex/context/interface/cont-cs.xml
+++ b/tex/context/interface/cont-cs.xml
@@ -6868,12 +6868,6 @@
<cd:parameter name="vyska">
<cd:constant type="cd:dimension"/>
</cd:parameter>
- <cd:parameter name="voffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
- <cd:parameter name="hoffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
<cd:parameter name="stranka">
<cd:constant type="vlevo"/>
<cd:constant type="ano"/>
diff --git a/tex/context/interface/cont-de.xml b/tex/context/interface/cont-de.xml
index fcd3194c6..acdb17c5a 100644
--- a/tex/context/interface/cont-de.xml
+++ b/tex/context/interface/cont-de.xml
@@ -6868,12 +6868,6 @@
<cd:parameter name="hoehe">
<cd:constant type="cd:dimension"/>
</cd:parameter>
- <cd:parameter name="voffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
- <cd:parameter name="hoffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
<cd:parameter name="seite">
<cd:constant type="links"/>
<cd:constant type="ja"/>
diff --git a/tex/context/interface/cont-en.xml b/tex/context/interface/cont-en.xml
index 76fb020cb..5611cbfbf 100644
--- a/tex/context/interface/cont-en.xml
+++ b/tex/context/interface/cont-en.xml
@@ -6868,12 +6868,6 @@
<cd:parameter name="height">
<cd:constant type="cd:dimension"/>
</cd:parameter>
- <cd:parameter name="voffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
- <cd:parameter name="hoffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
<cd:parameter name="page">
<cd:constant type="left"/>
<cd:constant type="yes"/>
diff --git a/tex/context/interface/cont-fr.xml b/tex/context/interface/cont-fr.xml
index 235b48fe3..a062fec82 100644
--- a/tex/context/interface/cont-fr.xml
+++ b/tex/context/interface/cont-fr.xml
@@ -6868,12 +6868,6 @@
<cd:parameter name="hauteur">
<cd:constant type="cd:dimension"/>
</cd:parameter>
- <cd:parameter name="voffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
- <cd:parameter name="decalagehauteur">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
<cd:parameter name="page">
<cd:constant type="gauche"/>
<cd:constant type="oui"/>
diff --git a/tex/context/interface/cont-it.xml b/tex/context/interface/cont-it.xml
index c920875e6..de37bdff9 100644
--- a/tex/context/interface/cont-it.xml
+++ b/tex/context/interface/cont-it.xml
@@ -6868,12 +6868,6 @@
<cd:parameter name="altezza">
<cd:constant type="cd:dimension"/>
</cd:parameter>
- <cd:parameter name="voffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
- <cd:parameter name="hoffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
<cd:parameter name="pagina">
<cd:constant type="sinistra"/>
<cd:constant type="si"/>
diff --git a/tex/context/interface/cont-nl.xml b/tex/context/interface/cont-nl.xml
index b04c64bf7..b82a7d07a 100644
--- a/tex/context/interface/cont-nl.xml
+++ b/tex/context/interface/cont-nl.xml
@@ -6868,12 +6868,6 @@
<cd:parameter name="hoogte">
<cd:constant type="cd:dimension"/>
</cd:parameter>
- <cd:parameter name="voffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
- <cd:parameter name="hoffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
<cd:parameter name="pagina">
<cd:constant type="links"/>
<cd:constant type="ja"/>
diff --git a/tex/context/interface/cont-pe.xml b/tex/context/interface/cont-pe.xml
index 99579e224..36cd1bb1c 100644
--- a/tex/context/interface/cont-pe.xml
+++ b/tex/context/interface/cont-pe.xml
@@ -6868,12 +6868,6 @@
<cd:parameter name="ارتفاع">
<cd:constant type="cd:dimension"/>
</cd:parameter>
- <cd:parameter name="آفست‌ع">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
- <cd:parameter name="آفست‌ا">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
<cd:parameter name="صفحه">
<cd:constant type="چپ"/>
<cd:constant type="بله"/>
diff --git a/tex/context/interface/cont-ro.xml b/tex/context/interface/cont-ro.xml
index 3433709bd..156e88ea0 100644
--- a/tex/context/interface/cont-ro.xml
+++ b/tex/context/interface/cont-ro.xml
@@ -6868,12 +6868,6 @@
<cd:parameter name="inaltime">
<cd:constant type="cd:dimension"/>
</cd:parameter>
- <cd:parameter name="voffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
- <cd:parameter name="hoffset">
- <cd:constant type="cd:dimension"/>
- </cd:parameter>
<cd:parameter name="pagina">
<cd:constant type="stanga"/>
<cd:constant type="da"/>
diff --git a/tex/context/sample/douglas.tex b/tex/context/sample/douglas.tex
index cf5fd50f3..7d986d484 100644
--- a/tex/context/sample/douglas.tex
+++ b/tex/context/sample/douglas.tex
@@ -1,23 +1,23 @@
Donald Knuth has spent the past several years working on a
system allowing him to control many aspects of the design
-of his forthcoming books|.|from the typesetting and layout
+of his forthcoming books, from the typesetting and layout
down to the very shapes of the letters! Seldom has an
author had anything remotely like this power to control the
final appearance of his or her work. Knuth's \TEX\
typesetting system has become well|-|known and available in
many countries around the world. By contrast, his
\METAFONT\ system for designing families of typefaces has
-not become as well known or available.
+not become as well known or available.
-In his article \quotation {The Concept of a Meta|-|Font},
-Knuth sets forth for the first time the underlying
-philosophy of \METAFONT, as well as some of its products.
-Not only is the concept exiting and clearly well executed,
+In his article \quotation {The Concept of a Meta|-|Font},
+Knuth sets forth for the first time the underlying
+philosophy of \METAFONT, as well as some of its products.
+Not only is the concept exiting and clearly well executed,
but in my opinion the article is charmingly written as well.
-However, despite my overall enthusiasm for Knuth's idea and
-article, there are some points in it that I feel might be
-taken wrongly by many readers, and since they are points
-that touch close to my deepest interests in artificial
-intelligence and esthetic theory, I felt compelled to make
-some comments to clarify certain important issues raised by
-\quotation {The Concept of a Meta|-|Font}.
+However, despite my overall enthusiasm for Knuth's idea and
+article, there are some points in it that I feel might be
+taken wrongly by many readers, and since they are points
+that touch close to my deepest interests in artificial
+intelligence and esthetic theory, I felt compelled to make
+some comments to clarify certain important issues raised by
+\quotation {The Concept of a Meta|-|Font}.
diff --git a/tex/context/test/pdf-x-common.mkiv b/tex/context/test/pdf-x-common.mkiv
index ac514c50d..463a60e71 100644
--- a/tex/context/test/pdf-x-common.mkiv
+++ b/tex/context/test/pdf-x-common.mkiv
@@ -12,6 +12,8 @@
\definecolor[cmykblack][k=1]
\definecolor[transtest][y=1,a=1,t=.5]
+\starttext
+
\startTEXpage
\blackrule[width=1cm,height=1cm,color=cmykcyan]
\blackrule[width=1cm,height=1cm,color=cmykmagenta]
@@ -20,3 +22,9 @@
\blackrule[width=1cm,height=1cm,color=transtest]\hskip-.5cm
\blackrule[width=1cm,height=1cm,color=transtest]
\stopTEXpage
+
+\startTEXpage
+ \tt Some text.
+\stopTEXpage
+
+\stoptext
diff --git a/tex/generic/context/luatex-fonts-merged.lua b/tex/generic/context/luatex-fonts-merged.lua
index e37ed7638..65a6db854 100644
--- a/tex/generic/context/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 08/20/10 00:00:51
+-- merge date : 09/03/10 11:05:53
do -- begin closure to overcome local limits and interference
@@ -290,7 +290,9 @@ patterns.sign = sign
patterns.cardinal = sign^0 * digit^1
patterns.integer = sign^0 * digit^1
patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
patterns.oct = P("0") * R("07")^1
patterns.octal = patterns.oct
patterns.HEX = P("0x") * R("09","AF")^1
@@ -509,6 +511,10 @@ function lpeg.secondofsplit(separator) -- nil if not split
return splitter
end
+function lpeg.balancer(left,right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
--~ print(1,match(lpeg.firstofsplit(":"),"bc:de"))
--~ print(2,match(lpeg.firstofsplit(":"),":de")) -- empty
--~ print(3,match(lpeg.firstofsplit(":"),"bc"))
@@ -1660,26 +1666,14 @@ file.iswritable = file.is_writable -- depricated
-- todo: lpeg
---~ function file.split_path(str)
---~ local t = { }
---~ str = gsub(str,"\\", "/")
---~ str = gsub(str,"(%a):([;/])", "%1\001%2")
---~ for name in gmatch(str,"([^;:]+)") do
---~ if name ~= "" then
---~ t[#t+1] = gsub(name,"\001",":")
---~ end
---~ end
---~ return t
---~ end
-
local checkedsplit = string.checkedsplit
-function file.split_path(str,separator)
+function file.splitpath(str,separator) -- string
str = gsub(str,"\\","/")
return checkedsplit(str,separator or io.pathseparator)
end
-function file.join_path(tab)
+function file.joinpath(tab) -- table
return concat(tab,io.pathseparator) -- can have trailing //
end
@@ -2115,7 +2109,7 @@ function io.ask(question,default,options)
end
end
-function io.readnumber(f,n,m)
+local function readnumber(f,n,m)
if m then
f:seek("set",n)
n = m
@@ -2127,18 +2121,20 @@ function io.readnumber(f,n,m)
return 256*a + b
elseif n == 4 then
local a, b, c, d = byte(f:read(4),1,4)
- return 256^3 * a + 256^2 * b + 256*c + d
+ return 256*256*256 * a + 256*256 * b + 256*c + d
elseif n == 8 then
local a, b = readnumber(f,4), readnumber(f,4)
- return 256 * b + c
+ return 256 * a + b
elseif n == 12 then
local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4)
- return 256^2 * a + 256 * b + c
+ return 256*256 * a + 256 * b + c
else
return 0
end
end
+io.readnumber = readnumber
+
function io.readstring(f,n,m)
if m then
f:seek("set",n)
@@ -2194,6 +2190,10 @@ logs = {
callbacks = {
register = function(n,f) return callback.register(n,f) end,
}
+utilities = {
+ allocate = function(t) return t end,
+ mark = function(t) return t end,
+}
-- we need to cheat a bit here
@@ -2210,7 +2210,7 @@ local remapper = {
fea = "font feature files",
}
-function resolvers.find_file(name,kind)
+function resolvers.findfile(name,kind)
name = string.gsub(name,"\\","\/")
kind = string.lower(kind)
return kpse.find_file(name,(kind and kind ~= "" and (remapper[kind] or kind)) or file.extname(name,"tex"))
@@ -2220,7 +2220,7 @@ function resolvers.findbinfile(name,kind)
if not kind or kind == "" then
kind = file.extname(name) -- string.match(name,"%.([^%.]-)$")
end
- return resolvers.find_file(name,(kind and remapper[kind]) or kind)
+ return resolvers.findfile(name,(kind and remapper[kind]) or kind)
end
-- Caches ... I will make a real stupid version some day when I'm in the
@@ -2391,7 +2391,8 @@ local mt = {
t.readables = readables
return readables
end
- end
+ end,
+ __storage__ = true
}
function containers.define(category, subcategory, version, enabled)
@@ -2516,7 +2517,7 @@ local fontdata = fonts.ids or { }
function nodes.simple_font_handler(head)
-- lang.hyphenate(head)
head = nodes.handlers.characters(head)
- nodes.handlers.injectkerns(head)
+ nodes.injections.handler(head)
nodes.handlers.protectglyphs(head)
head = node.ligaturing(head)
head = node.kerning(head)
@@ -2651,6 +2652,9 @@ fonts = fonts or { }
fonts.tfm = fonts.tfm or { }
fonts.ids = fonts.ids or { }
+nodes.injections = nodes.injections or { }
+local injections = nodes.injections
+
local fontdata = fonts.ids
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
@@ -2683,7 +2687,7 @@ local kerns = { }
-- for the moment we pass the r2l key ... volt/arabtype tests
-function nodes.set_cursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
local ws, wn = tfmstart.width, tfmnext.width
local bound = #cursives + 1
@@ -2693,7 +2697,7 @@ function nodes.set_cursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
return dx, dy, bound
end
-function nodes.set_pair(current,factor,rlmode,r2lflag,spec,tfmchr)
+function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
-- dy = y - h
if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
@@ -2712,7 +2716,7 @@ function nodes.set_pair(current,factor,rlmode,r2lflag,spec,tfmchr)
return x, y, w, h -- no bound
end
-function nodes.set_kern(current,factor,rlmode,x,tfmchr)
+function injections.setkern(current,factor,rlmode,x,tfmchr)
local dx = factor*x
if dx ~= 0 then
local bound = #kerns + 1
@@ -2724,7 +2728,7 @@ function nodes.set_kern(current,factor,rlmode,x,tfmchr)
end
end
-function nodes.set_mark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, ma=markanchor
+function injections.setmark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, ma=markanchor
local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
local bound = has_attribute(base,markbase)
if bound then
@@ -2752,7 +2756,7 @@ local function dir(n)
return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
end
-function nodes.trace_injection(head)
+local function trace(head)
report_injections("begin run")
for n in traverse_id(glyph_code,head) do
if n.subtype < 256 then
@@ -2803,12 +2807,12 @@ end
-- todo: reuse tables (i.e. no collection), but will be extra fields anyway
-- todo: check for attribute
-function nodes.handlers.injectkerns(head,where,keep)
+function injections.handler(head,where,keep)
local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
if has_marks or has_cursives then
--~ if has_marks or has_cursives or has_kerns then
if trace_injections then
- nodes.trace_injection(head)
+ trace(head)
end
-- in the future variant we will not copy items but refs to tables
local done, ky, rl, valid, cx, wx, mk = false, { }, { }, { }, { }, { }, { }
@@ -3012,7 +3016,7 @@ function nodes.handlers.injectkerns(head,where,keep)
end
elseif has_kerns then
if trace_injections then
- nodes.trace_injection(head)
+ trace(head)
end
for n in traverse_id(glyph_code,head) do
if n.subtype < 256 then
@@ -3083,6 +3087,7 @@ local utf = unicode.utf8
local format, serialize = string.format, table.serialize
local write_nl = texio.write_nl
local lower = string.lower
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local report_define = logs.new("define fonts")
@@ -3095,9 +3100,11 @@ fonts = fonts or { }
-- we will also have des and fam hashes
-fonts.ids = fonts.ids or { } fonts.identifiers = fonts.ids -- aka fontdata
-fonts.chr = fonts.chr or { } fonts.characters = fonts.chr -- aka chardata
-fonts.qua = fonts.qua or { } fonts.quads = fonts.qua -- aka quaddata
+-- beware, some already defined
+
+fonts.ids = mark(fonts.ids or { }) fonts.identifiers = fonts.ids -- aka fontdata
+fonts.chr = mark(fonts.chr or { }) fonts.characters = fonts.chr -- aka chardata
+fonts.qua = mark(fonts.qua or { }) fonts.quads = fonts.qua -- aka quaddata
fonts.tfm = fonts.tfm or { }
fonts.vf = fonts.vf or { }
@@ -3105,7 +3112,7 @@ fonts.afm = fonts.afm or { }
fonts.pfb = fonts.pfb or { }
fonts.otf = fonts.otf or { }
-fonts.private = 0xF0000 -- 0x10FFFF
+fonts.privateoffset = 0xF0000 -- 0x10FFFF
fonts.verbose = false -- more verbose cache tables
fonts.ids[0] = { -- nullfont
@@ -3139,15 +3146,15 @@ fonts.processors = fonts.processors or {
fonts.manipulators = fonts.manipulators or {
}
-fonts.define = fonts.define or { }
-fonts.define.specify = fonts.define.specify or { }
-fonts.define.specify.synonyms = fonts.define.specify.synonyms or { }
+fonts.definers = fonts.definers or { }
+fonts.definers.specifiers = fonts.definers.specifiers or { }
+fonts.definers.specifiers.synonyms = fonts.definers.specifiers.synonyms or { }
-- tracing
if not fonts.color then
- fonts.color = {
+ fonts.color = allocate {
set = function() end,
reset = function() end,
}
@@ -3156,7 +3163,7 @@ end
-- format identification
-fonts.formats = { }
+fonts.formats = allocate()
function fonts.fontformat(filename,default)
local extname = lower(file.extname(filename))
@@ -3186,6 +3193,8 @@ local utf = unicode.utf8
local next, format, match, lower, gsub = next, string.format, string.match, string.lower, string.gsub
local concat, sortedkeys, utfbyte, serialize = table.concat, table.sortedkeys, utf.byte, table.serialize
+local allocate = utilities.storage.allocate
+
local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end)
@@ -3203,8 +3212,8 @@ local report_define = logs.new("define fonts")
local fonts = fonts
local tfm = fonts.tfm
-fonts.loaded = fonts.loaded or { }
-fonts.dontembed = fonts.dontembed or { }
+fonts.loaded = allocate()
+fonts.dontembed = allocate()
fonts.triggers = fonts.triggers or { } -- brrr
fonts.initializers = fonts.initializers or { }
fonts.initializers.common = fonts.initializers.common or { }
@@ -3222,10 +3231,10 @@ local glyph_code = nodecodes.glyph
supplied by <l n='luatex'/>.</p>
--ldx]]--
-tfm.resolve_vf = true -- false
-tfm.share_base_kerns = false -- true (.5 sec slower on mk but brings down mem from 410M to 310M, beware: then script/lang share too)
-tfm.mathactions = { }
-tfm.fontname_mode = "fullpath"
+tfm.resolvevirtualtoo = true -- false
+tfm.sharebasekerns = false -- true (.5 sec slower on mk but brings down mem from 410M to 310M, beware: then script/lang share too)
+tfm.mathactions = { }
+tfm.fontnamemode = "fullpath"
tfm.enhance = tfm.enhance or function() end
@@ -3240,7 +3249,7 @@ function tfm.read_from_tfm(specification)
tfmdata = font.read_tfm(fname,specification.size) -- not cached, fast enough
if tfmdata then
tfmdata.descriptions = tfmdata.descriptions or { }
- if tfm.resolve_vf then
+ if tfm.resolvevirtualtoo then
fonts.logger.save(tfmdata,file.extname(fname),specification) -- strange, why here
fname = resolvers.findbinfile(specification.name, 'ovf')
if fname and fname ~= "" then
@@ -3301,7 +3310,7 @@ end
to scale virtual characters.</p>
--ldx]]--
-function tfm.get_virtual_id(tfmdata)
+function tfm.getvirtualid(tfmdata)
-- since we don't know the id yet, we use 0 as signal
if not tfmdata.fonts then
tfmdata.type = "virtual"
@@ -3313,7 +3322,7 @@ function tfm.get_virtual_id(tfmdata)
end
end
-function tfm.check_virtual_id(tfmdata, id)
+function tfm.checkvirtualid(tfmdata, id)
if tfmdata and tfmdata.type == "virtual" then
if not tfmdata.fonts or #tfmdata.fonts == 0 then
tfmdata.type, tfmdata.fonts = "real", nil
@@ -3343,7 +3352,7 @@ fonts.trace_scaling = false
-- sharedkerns are unscaled and are be hashed by concatenated indexes
--~ function tfm.check_base_kerns(tfmdata)
---~ if tfm.share_base_kerns then
+--~ if tfm.sharebasekerns then
--~ local sharedkerns = tfmdata.sharedkerns
--~ if sharedkerns then
--~ local basekerns = { }
@@ -3355,7 +3364,7 @@ fonts.trace_scaling = false
--~ end
--~ function tfm.prepare_base_kerns(tfmdata)
---~ if tfm.share_base_kerns and not tfmdata.sharedkerns then
+--~ if tfm.sharebasekerns and not tfmdata.sharedkerns then
--~ local sharedkerns = { }
--~ tfmdata.sharedkerns = sharedkerns
--~ for u, chr in next, tfmdata.characters do
@@ -3384,7 +3393,43 @@ local charactercache = { }
-- a virtual font has italic correction make sure to set the
-- has_italic flag. Some more flags will be added in the future.
-function tfm.calculate_scale(tfmtable, scaledpoints)
+--[[ldx--
+<p>The reason why the scaler was originally split, is that for a while we experimented
+with a helper function. However, in practice the <l n='api'/> calls are too slow to
+make this profitable and the <l n='lua'/> based variant was just faster. A days
+wasted day but an experience richer.</p>
+--ldx]]--
+
+tfm.autocleanup = true
+
+local lastfont = nil
+
+-- we can get rid of the tfm instance when we have fast access to the
+-- scaled character dimensions at the tex end, e.g. a fontobject.width
+--
+-- flushing the kern and ligature tables from memory saves a lot (only
+-- base mode) but it complicates vf building where the new characters
+-- demand this data .. solution: functions that access them
+
+function tfm.cleanuptable(tfmdata) -- we need a cleanup callback, now we miss the last one
+ if tfm.autocleanup then -- ok, we can hook this into everyshipout or so ... todo
+ if tfmdata.type == 'virtual' or tfmdata.virtualized then
+ for k, v in next, tfmdata.characters do
+ if v.commands then v.commands = nil end
+ -- if v.kerns then v.kerns = nil end
+ end
+ else
+ -- for k, v in next, tfmdata.characters do
+ -- if v.kerns then v.kerns = nil end
+ -- end
+ end
+ end
+end
+
+function tfm.cleanup(tfmdata) -- we need a cleanup callback, now we miss the last one
+end
+
+function tfm.calculatescale(tfmtable, scaledpoints)
if scaledpoints < 0 then
scaledpoints = (- scaledpoints/1000) * tfmtable.designsize -- already in sp
end
@@ -3393,10 +3438,10 @@ function tfm.calculate_scale(tfmtable, scaledpoints)
return scaledpoints, delta, units
end
-function tfm.do_scale(tfmtable, scaledpoints, relativeid)
+function tfm.scale(tfmtable, scaledpoints, relativeid)
-- tfm.prepare_base_kerns(tfmtable) -- optimalization
local t = { } -- the new table
- local scaledpoints, delta, units = tfm.calculate_scale(tfmtable, scaledpoints, relativeid)
+ local scaledpoints, delta, units = tfm.calculatescale(tfmtable, scaledpoints, relativeid)
t.units_per_em = units or 1000
local hdelta, vdelta = delta, delta
-- unicoded unique descriptions shared cidinfo characters changed parameters indices
@@ -3478,7 +3523,7 @@ function tfm.do_scale(tfmtable, scaledpoints, relativeid)
local scaledheight = defaultheight * vdelta
local scaleddepth = defaultdepth * vdelta
local stackmath = tfmtable.ignore_stack_math ~= true
- local private = fonts.private
+ local private = fonts.privateoffset
local sharedkerns = { }
for k,v in next, characters do
local chr, description, index
@@ -3763,55 +3808,14 @@ function tfm.do_scale(tfmtable, scaledpoints, relativeid)
report_define("used for accesing subfont: '%s'",t.psname or "nopsname")
report_define("used for subsetting: '%s'",t.fontname or "nofontname")
end
---~ print(t.fontname,table.serialize(t.MathConstants))
- return t, delta
-end
-
---[[ldx--
-<p>The reason why the scaler is split, is that for a while we experimented
-with a helper function. However, in practice the <l n='api'/> calls are too slow to
-make this profitable and the <l n='lua'/> based variant was just faster. A days
-wasted day but an experience richer.</p>
---ldx]]--
-
-tfm.auto_cleanup = true
-
-local lastfont = nil
-
--- we can get rid of the tfm instance when we have fast access to the
--- scaled character dimensions at the tex end, e.g. a fontobject.width
---
--- flushing the kern and ligature tables from memory saves a lot (only
--- base mode) but it complicates vf building where the new characters
--- demand this data .. solution: functions that access them
-
-function tfm.cleanup_table(tfmdata) -- we need a cleanup callback, now we miss the last one
- if tfm.auto_cleanup then -- ok, we can hook this into everyshipout or so ... todo
- if tfmdata.type == 'virtual' or tfmdata.virtualized then
- for k, v in next, tfmdata.characters do
- if v.commands then v.commands = nil end
- -- if v.kerns then v.kerns = nil end
- end
- else
- -- for k, v in next, tfmdata.characters do
- -- if v.kerns then v.kerns = nil end
- -- end
- end
- end
-end
-
-function tfm.cleanup(tfmdata) -- we need a cleanup callback, now we miss the last one
-end
-
-function tfm.scale(tfmtable, scaledpoints, relativeid)
- local t, factor = tfm.do_scale(tfmtable, scaledpoints, relativeid)
- t.factor = factor
- t.ascender = factor*(tfmtable.ascender or 0)
- t.descender = factor*(tfmtable.descender or 0)
+ -- this will move up (side effect of merging split call)
+ t.factor = delta
+ t.ascender = delta*(tfmtable.ascender or 0)
+ t.descender = delta*(tfmtable.descender or 0)
t.shared = tfmtable.shared or { }
t.unique = table.fastcopy(tfmtable.unique or {})
---~ print("scaling", t.name, t.factor) -- , tfm.hash_features(tfmtable.specification))
tfm.cleanup(t)
+ -- print(t.fontname,table.serialize(t.MathConstants))
return t
end
@@ -3820,10 +3824,12 @@ end
process features right.</p>
--ldx]]--
-fonts.analyzers = fonts.analyzers or { }
-fonts.analyzers.aux = fonts.analyzers.aux or { }
-fonts.analyzers.methods = fonts.analyzers.methods or { }
-fonts.analyzers.initializers = fonts.analyzers.initializers or { }
+fonts.analyzers = fonts.analyzers or { }
+local analyzers = fonts.analyzers
+
+analyzers.aux = analyzers.aux or { }
+analyzers.methods = analyzers.methods or { }
+analyzers.initializers = analyzers.initializers or { }
-- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze
@@ -3832,7 +3838,7 @@ fonts.analyzers.initializers = fonts.analyzers.initializers or { }
local state = attributes.private('state')
-function fonts.analyzers.aux.setstate(head,font)
+function analyzers.aux.setstate(head,font)
local tfmdata = fontdata[font]
local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
@@ -3893,7 +3899,7 @@ end
-- checking
-function tfm.checked_filename(metadata,whatever)
+function tfm.checkedfilename(metadata,whatever)
local foundfilename = metadata.foundfilename
if not foundfilename then
local askedfilename = metadata.filename or ""
@@ -4020,7 +4026,7 @@ local function locate(registry,ordering,supplement)
if trace_loading then
report_otf("checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
end
- local fullname = resolvers.find_file(filename,'cid') or ""
+ local fullname = resolvers.findfile(filename,'cid') or ""
if fullname ~= "" then
cidmap = cid.load(fullname)
if cidmap then
@@ -4094,6 +4100,8 @@ local type, next, tonumber, tostring = type, next, tonumber, tostring
local gsub, lower, format = string.gsub, string.lower, string.format
local is_boolean = string.is_boolean
+local allocate = utilities.storage.allocate
+
local fonts = fonts
fonts.otf = fonts.otf or { }
local otf = fonts.otf
@@ -4104,7 +4112,7 @@ local tables = otf.tables
otf.meanings = otf.meanings or { }
local meanings = otf.meanings
-local scripts = {
+local scripts = allocate {
['dflt'] = 'Default',
['arab'] = 'Arabic',
@@ -4177,7 +4185,7 @@ local scripts = {
['yi' ] = 'Yi',
}
-local languages = {
+local languages = allocate {
['dflt'] = 'Default',
['aba'] = 'Abaza',
@@ -4571,7 +4579,7 @@ local languages = {
['zul'] = 'Zulu'
}
-local features = {
+local features = allocate {
['aalt'] = 'Access All Alternates',
['abvf'] = 'Above-Base Forms',
['abvm'] = 'Above-Base Mark Positioning',
@@ -4709,7 +4717,7 @@ local features = {
['tlig'] = 'Traditional TeX Ligatures',
}
-local baselines = {
+local baselines = allocate {
['hang'] = 'Hanging baseline',
['icfb'] = 'Ideographic character face bottom edge baseline',
['icft'] = 'Ideographic character face tope edige baseline',
@@ -4719,32 +4727,32 @@ local baselines = {
['romn'] = 'Roman baseline'
}
-local to_scripts = table.swaphash(scripts )
-local to_languages = table.swaphash(languages)
-local to_features = table.swaphash(features )
+local verbosescripts = allocate(table.swaphash(scripts ))
+local verboselanguages = allocate(table.swaphash(languages))
+local verbosefeatures = allocate(table.swaphash(features ))
-tables.scripts = scripts
-tables.languages = languages
-tables.features = features
-tables.baselines = baselines
+tables.scripts = scripts
+tables.languages = languages
+tables.features = features
+tables.baselines = baselines
-tables.to_scripts = to_scripts
-tables.to_languages = to_languages
-tables.to_features = to_features
+tables.verbosescripts = verbosescripts
+tables.verboselanguages = verboselanguages
+tables.verbosefeatures = verbosefeatures
-for k, v in next, to_features do
+for k, v in next, verbosefeatures do
local stripped = gsub(k,"%-"," ")
- to_features[stripped] = v
+ verbosefeatures[stripped] = v
local stripped = gsub(k,"[^a-zA-Z0-9]","")
- to_features[stripped] = v
+ verbosefeatures[stripped] = v
end
-for k, v in next, to_features do
- to_features[lower(k)] = v
+for k, v in next, verbosefeatures do
+ verbosefeatures[lower(k)] = v
end
-- can be sped up by local tables
-function tables.to_tag(id)
+function tables.totag(id) -- not used
return format("%4s",lower(id))
end
@@ -4778,14 +4786,14 @@ function meanings.normalize(features)
if k == "language" or k == "lang" then
v = gsub(lower(v),"[^a-z0-9%-]","")
if not languages[v] then
- h.language = to_languages[v] or "dflt"
+ h.language = verboselanguages[v] or "dflt"
else
h.language = v
end
elseif k == "script" then
v = gsub(lower(v),"[^a-z0-9%-]","")
if not scripts[v] then
- h.script = to_scripts[v] or "dflt"
+ h.script = verbosescripts[v] or "dflt"
else
h.script = v
end
@@ -4798,7 +4806,7 @@ function meanings.normalize(features)
v = b
end
end
- k = to_features[k] or k
+ k = verbosefeatures[k] or k
local c = checkers[k]
h[k] = c and c(v) or v
end
@@ -5069,9 +5077,9 @@ of obsolete. Some code may move to runtime or auxiliary modules.</p>
local fonts = fonts
fonts.map = fonts.map or { }
-local function load_lum_table(filename) -- will move to font goodies
+local function loadlumtable(filename) -- will move to font goodies
local lumname = file.replacesuffix(file.basename(filename),"lum")
- local lumfile = resolvers.find_file(lumname,"map") or ""
+ local lumfile = resolvers.findfile(lumname,"map") or ""
if lumfile ~= "" and lfs.isfile(lumfile) then
if trace_loading or trace_unimapping then
report_otf("enhance: loading %s ",lumfile)
@@ -5096,7 +5104,7 @@ local parser = unicode + ucode + index
local parsers = { }
-local function make_name_parser(str)
+local function makenameparser(str)
if not str or str == "" then
return parser
else
@@ -5109,8 +5117,8 @@ local function make_name_parser(str)
end
end
---~ local parser = fonts.map.make_name_parser("Japan1")
---~ local parser = fonts.map.make_name_parser()
+--~ local parser = fonts.map.makenameparser("Japan1")
+--~ local parser = fonts.map.makenameparser()
--~ local function test(str)
--~ local b, a = lpegmatch(parser,str)
--~ print((a and table.serialize(b)) or b)
@@ -5164,8 +5172,8 @@ end
--~ return s
--~ end
-fonts.map.load_lum_table = load_lum_table
-fonts.map.make_name_parser = make_name_parser
+fonts.map.loadlumtable = loadlumtable
+fonts.map.makenameparser = makenameparser
fonts.map.tounicode16 = tounicode16
fonts.map.tounicode16sequence = tounicode16sequence
@@ -5179,7 +5187,7 @@ local ligsplitter = Ct(other * (separator * other)^0)
--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more")))
--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that")))
-fonts.map.add_to_unicode = function(data,filename)
+fonts.map.addtounicode = function(data,filename)
local unicodes = data.luatex and data.luatex.unicodes
if not unicodes then
return
@@ -5190,11 +5198,11 @@ fonts.map.add_to_unicode = function(data,filename)
unicodes['zwj'] = unicodes['zwj'] or 0x200D
unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
-- the tounicode mapping is sparse and only needed for alternatives
- local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.private, format("%04X",utfbyte("?"))
+ local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.privateoffset, format("%04X",utfbyte("?"))
data.luatex.tounicode, data.luatex.originals = tounicode, originals
local lumunic, uparser, oparser
if false then -- will become an option
- lumunic = load_lum_table(filename)
+ lumunic = loadlumtable(filename)
lumunic = lumunic and lumunic.tounicode
end
local cidinfo, cidnames, cidcodes = data.cidinfo
@@ -5202,12 +5210,12 @@ fonts.map.add_to_unicode = function(data,filename)
usedmap = usedmap and lower(usedmap)
usedmap = usedmap and fonts.cid.map[usedmap]
if usedmap then
- oparser = usedmap and make_name_parser(cidinfo.ordering)
+ oparser = usedmap and makenameparser(cidinfo.ordering)
cidnames = usedmap.names
cidcodes = usedmap.unicodes
end
- uparser = make_name_parser()
- local aglmap = fonts.map and fonts.map.agl_to_unicode
+ uparser = makenameparser()
+ local aglmap = fonts.enc.agl and fonts.enc.unicodes -- to name
for index, glyph in next, data.glyphs do
local name, unic = glyph.name, glyph.unicode or -1 -- play safe
if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
@@ -5435,6 +5443,8 @@ local abs = math.abs
local getn = table.getn
local lpegmatch = lpeg.match
+local allocate = utilities.storage.allocate
+
local trace_private = false trackers.register("otf.private", function(v) trace_private = v end)
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
local trace_features = false trackers.register("otf.features", function(v) trace_features = v end)
@@ -5499,9 +5509,11 @@ otf.features = otf.features or { }
otf.features.list = otf.features.list or { }
otf.features.default = otf.features.default or { }
-otf.enhancers = otf.enhancers or { }
+otf.enhancers = allocate()
local enhancers = otf.enhancers
+local definers = fonts.definers
+
otf.glists = { "gsub", "gpos" }
otf.version = 2.653 -- beware: also sync font-mis.lua
@@ -5509,7 +5521,6 @@ otf.pack = true -- beware: also sync font-mis.lua
otf.syncspace = true
otf.notdef = false
otf.cache = containers.define("fonts", "otf", otf.version, true)
-otf.cleanup_aat = false -- only context
local wildcard = "*"
local default = "dflt"
@@ -5520,7 +5531,7 @@ local default = "dflt"
-- we can have more local functions
-otf.tables.global_fields = table.tohash {
+otf.tables.global_fields = allocate( table.tohash {
"lookups",
"glyphs",
"subfonts",
@@ -5536,9 +5547,9 @@ otf.tables.global_fields = table.tohash {
"kern_classes",
"gpos",
"gsub"
-}
+} )
-otf.tables.valid_fields = {
+otf.tables.valid_fields = allocate( {
"anchor_classes",
"ascent",
"cache_version",
@@ -5596,21 +5607,18 @@ otf.tables.valid_fields = {
"weight",
"weight_width_slope_only",
"xuid",
-}
+} )
--[[ldx--
<p>Here we go.</p>
--ldx]]--
local function load_featurefile(ff,featurefile)
- if featurefile then
- featurefile = resolvers.find_file(file.addsuffix(featurefile,'fea'),'fea')
- if featurefile and featurefile ~= "" then
- if trace_loading then
- report_otf("featurefile: %s", featurefile)
- end
- fontloader.apply_featurefile(ff, featurefile)
+ if featurefile and featurefile ~= "" then
+ if trace_loading then
+ report_otf("featurefile: %s", featurefile)
end
+ fontloader.apply_featurefile(ff, featurefile)
end
end
@@ -5635,8 +5643,8 @@ local ordered_enhancers = { -- implemented later
"flatten glyph lookups", "flatten anchor tables", "flatten feature tables",
"simplify glyph lookups", -- some saving
"prepare luatex tables",
- "analyse features", "rehash features",
- "analyse anchors", "analyse marks", "analyse unicodes", "analyse subtables",
+ "analyze features", "rehash features",
+ "analyze anchors", "analyze marks", "analyze unicodes", "analyze subtables",
"check italic correction","check math",
"share widths",
"strip not needed data",
@@ -5644,7 +5652,7 @@ local ordered_enhancers = { -- implemented later
"check math parameters",
}
-local add_dimensions, show_feature_order -- implemented later
+local adddimensions, showfeatureorder -- implemented later
function otf.load(filename,format,sub,featurefile)
local name = file.basename(file.removesuffix(filename))
@@ -5659,8 +5667,50 @@ function otf.load(filename,format,sub,featurefile)
hash = hash .. "-" .. sub
end
hash = containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles = { }
+ for s in gmatch(featurefile,"[^,]+") do
+ local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name == "" then
+ report_otf("loading: no featurefile '%s'",s)
+ else
+ local attr = lfs.attributes(name)
+ featurefiles[#featurefiles+1] = {
+ name = name,
+ size = attr.size or 0,
+ time = attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles == 0 then
+ featurefiles = nil
+ end
+ end
local data = containers.read(otf.cache,hash)
- if not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time then
+ local reload = not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time
+ if not reload then
+ local featuredata = data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata ~= #featurefiles then
+ reload = true
+ else
+ for i=1,#featurefiles do
+ local fi, fd = featurefiles[i], featuredata[i]
+ if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then
+ reload = true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload = true
+ end
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification: %s",featurefile or "--")
+ end
+ end
+ if reload then
report_otf("loading: %s (hash: %s)",filename,hash)
local ff, messages
if sub then
@@ -5680,7 +5730,11 @@ function otf.load(filename,format,sub,featurefile)
report_otf("font loaded okay")
end
if ff then
- load_featurefile(ff,featurefile)
+ if featurefiles then
+ for i=1,#featurefiles do
+ load_featurefile(ff,featurefiles[i].name)
+ end
+ end
data = fontloader.to_table(ff)
fontloader.close(ff)
if data then
@@ -5695,6 +5749,9 @@ function otf.load(filename,format,sub,featurefile)
end
data.size = size
data.time = time
+ if featurefiles then
+ data.featuredata = featurefiles
+ end
data.verbose = fonts.verbose
report_otf("saving in cache: %s",filename)
data = containers.write(otf.cache, hash, data)
@@ -5713,15 +5770,15 @@ function otf.load(filename,format,sub,featurefile)
report_otf("loading from cache: %s",hash)
end
enhance("unpack",data,filename,false) -- no message here
- add_dimensions(data)
+ adddimensions(data)
if trace_sequences then
- show_feature_order(data,filename)
+ showfeatureorder(data,filename)
end
end
return data
end
-add_dimensions = function(data)
+adddimensions = function(data)
-- todo: forget about the width if it's the defaultwidth (saves mem)
-- we could also build the marks hash here (instead of storing it)
if data then
@@ -5757,7 +5814,7 @@ add_dimensions = function(data)
end
end
-local function show_feature_order(otfdata,filename)
+local function showfeatureorder(otfdata,filename)
local sequences = otfdata.luatex.sequences
if sequences and #sequences > 0 then
if trace_loading then
@@ -5830,11 +5887,6 @@ enhancers["prepare luatex tables"] = function(data,filename)
luatex.creator = "context mkiv"
end
-enhancers["cleanup aat"] = function(data,filename)
- if otf.cleanup_aat then
- end
-end
-
local function analyze_features(g, features)
if g then
local t, done = { }, { }
@@ -5858,7 +5910,7 @@ local function analyze_features(g, features)
return nil
end
-enhancers["analyse features"] = function(data,filename)
+enhancers["analyze features"] = function(data,filename)
-- local luatex = data.luatex
-- luatex.gposfeatures = analyze_features(data.gpos)
-- luatex.gsubfeatures = analyze_features(data.gsub)
@@ -5895,7 +5947,7 @@ enhancers["rehash features"] = function(data,filename)
end
end
-enhancers["analyse anchors"] = function(data,filename)
+enhancers["analyze anchors"] = function(data,filename)
local classes = data.anchor_classes
local luatex = data.luatex
local anchor_to_lookup, lookup_to_anchor = { }, { }
@@ -5921,7 +5973,7 @@ enhancers["analyse anchors"] = function(data,filename)
end
end
-enhancers["analyse marks"] = function(data,filename)
+enhancers["analyze marks"] = function(data,filename)
local glyphs = data.glyphs
local marks = { }
data.luatex.marks = marks
@@ -5933,9 +5985,9 @@ enhancers["analyse marks"] = function(data,filename)
end
end
-enhancers["analyse unicodes"] = fonts.map.add_to_unicode
+enhancers["analyze unicodes"] = fonts.map.addtounicode
-enhancers["analyse subtables"] = function(data,filename)
+enhancers["analyze subtables"] = function(data,filename)
data.luatex = data.luatex or { }
local luatex = data.luatex
local sequences = { }
@@ -6074,8 +6126,8 @@ enhancers["prepare unicode"] = function(data,filename)
else
mapmap = mapmap.map
end
- local criterium = fonts.private
- local private = fonts.private
+ local criterium = fonts.privateoffset
+ local private = criterium
for index, glyph in next, glyphs do
if index > 0 then
local name = glyph.name
@@ -6780,7 +6832,7 @@ enhancers["flatten feature tables"] = function(data,filename)
end
end
-enhancers.patches = enhancers.patches or { }
+enhancers.patches = allocate()
enhancers["patch bugs"] = function(data,filename)
local basename = file.basename(lower(filename))
@@ -6995,7 +7047,7 @@ local function copytotfm(data,cache_id) -- we can save a copy when we reorder th
end
spaceunits = tonumber(spaceunits) or tfm.units/2 -- 500 -- brrr
-- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
- local filename = fonts.tfm.checked_filename(luatex)
+ local filename = fonts.tfm.checkedfilename(luatex)
local fontname = metadata.fontname
local fullname = metadata.fullname or fontname
local cidinfo = data.cidinfo
@@ -7107,7 +7159,7 @@ local function otftotfm(specification)
tfmdata.has_italic = otfdata.metadata.has_italic
if not tfmdata.language then tfmdata.language = 'dflt' end
if not tfmdata.script then tfmdata.script = 'dflt' end
- shared.processes, shared.features = otf.setfeatures(tfmdata,fonts.define.check(features,otf.features.default))
+ shared.processes, shared.features = otf.setfeatures(tfmdata,definers.check(features,otf.features.default))
end
end
containers.write(tfm.cache,cache_id,tfmdata)
@@ -7117,7 +7169,7 @@ end
otf.features.register('mathsize')
-function tfm.read_from_open_type(specification) -- wrong namespace
+function tfm.read_from_otf(specification) -- wrong namespace
local tfmtable = otftotfm(specification)
if tfmtable then
local otfdata = tfmtable.shared.otfdata
@@ -7155,7 +7207,7 @@ function tfm.read_from_open_type(specification) -- wrong namespace
end
end
tfmtable = tfm.scale(tfmtable,s,specification.relativeid)
- if tfm.fontname_mode == "specification" then
+ if tfm.fontnamemode == "specification" then
-- not to be used in context !
local specname = specification.specification
if specname then
@@ -7173,7 +7225,7 @@ end
-- helpers
-function otf.collect_lookups(otfdata,kind,script,language)
+function otf.collectlookups(otfdata,kind,script,language)
-- maybe store this in the font
local sequences = otfdata.luatex.sequences
if sequences then
@@ -7220,23 +7272,24 @@ local trace_dynamics = false trackers.register("otf.dynamics", function(v) trac
local report_otf = logs.new("load otf")
-local fonts = fonts
-local otf = fonts.otf
-local fontdata = fonts.ids
+local fonts = fonts
+local otf = fonts.otf
+local fontdata = fonts.ids
otf.features = otf.features or { }
otf.features.default = otf.features.default or { }
-local context_setups = fonts.define.specify.context_setups
-local context_numbers = fonts.define.specify.context_numbers
+local definers = fonts.definers
+local contextsetups = definers.specifiers.contextsetups
+local contextnumbers = definers.specifiers.contextnumbers
-- todo: dynamics namespace
-local a_to_script = { } otf.a_to_script = a_to_script
-local a_to_language = { } otf.a_to_language = a_to_language
+local a_to_script = { }
+local a_to_language = { }
function otf.setdynamics(font,dynamics,attribute)
- local features = context_setups[context_numbers[attribute]] -- can be moved to caller
+ local features = contextsetups[contextnumbers[attribute]] -- can be moved to caller
if features then
local script = features.script or 'dflt'
local language = features.language or 'dflt'
@@ -7253,7 +7306,7 @@ function otf.setdynamics(font,dynamics,attribute)
local dsla = dsl[attribute]
if dsla then
-- if trace_dynamics then
- -- report_otf("using dynamics %s: attribute %s, script %s, language %s",context_numbers[attribute],attribute,script,language)
+ -- report_otf("using dynamics %s: attribute %s, script %s, language %s",contextnumbers[attribute],attribute,script,language)
-- end
return dsla
else
@@ -7273,10 +7326,10 @@ function otf.setdynamics(font,dynamics,attribute)
tfmdata.script = script
tfmdata.shared.features = { }
-- end of save
- local set = fonts.define.check(features,otf.features.default)
+ local set = definers.check(features,otf.features.default)
dsla = otf.setfeatures(tfmdata,set)
if trace_dynamics then
- report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",context_numbers[attribute],attribute,script,language,table.sequenced(set))
+ report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",contextnumbers[attribute],attribute,script,language,table.sequenced(set))
end
-- we need to restore some values
tfmdata.script = saved.script
@@ -7291,6 +7344,14 @@ function otf.setdynamics(font,dynamics,attribute)
return nil -- { }
end
+function otf.scriptandlanguage(tfmdata,attr)
+ if attr and attr > 0 then
+ return a_to_script[attr] or tfmdata.script, a_to_language[attr] or tfmdata.language
+ else
+ return tfmdata.script, tfmdata.language
+ end
+end
+
end -- closure
do -- begin closure to overcome local limits and interference
@@ -7310,11 +7371,8 @@ local fonts = fonts
local otf = fonts.otf
local initializers = fonts.initializers
-otf.default_language = 'latn'
-otf.default_script = 'dflt'
-
-local languages = otf.tables.languages
-local scripts = otf.tables.scripts
+local languages = otf.tables.languages
+local scripts = otf.tables.scripts
local function set_language(tfmdata,value)
if value then
@@ -7517,7 +7575,7 @@ local splitter = lpeg.splitat(" ")
local function prepare_base_substitutions(tfmdata,kind,value) -- we can share some code with the node features
if value then
local otfdata = tfmdata.shared.otfdata
- local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
+ local validlookups, lookuplist = otf.collectlookups(otfdata,kind,tfmdata.script,tfmdata.language)
if validlookups then
local ligatures = { }
local unicodes = tfmdata.unicodes -- names to unicodes
@@ -7629,7 +7687,7 @@ end
local function preparebasekerns(tfmdata,kind,value) -- todo what kind of kerns, currently all
if value then
local otfdata = tfmdata.shared.otfdata
- local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
+ local validlookups, lookuplist = otf.collectlookups(otfdata,kind,tfmdata.script,tfmdata.language)
if validlookups then
local unicodes = tfmdata.unicodes -- names to unicodes
local indices = tfmdata.indices
@@ -7956,10 +8014,11 @@ local curscurs = attributes.private('curscurs')
local cursdone = attributes.private('cursdone')
local kernpair = attributes.private('kernpair')
-local set_mark = nodes.set_mark
-local set_cursive = nodes.set_cursive
-local set_kern = nodes.set_kern
-local set_pair = nodes.set_pair
+local injections = nodes.injections
+local setmark = injections.setmark
+local setcursive = injections.setcursive
+local setkern = injections.setkern
+local setpair = injections.setpair
local markonce = true
local cursonce = true
@@ -7988,9 +8047,10 @@ local featurevalue = false
-- we cheat a bit and assume that a font,attr combination are kind of ranged
-local context_setups = fonts.define.specify.context_setups
-local context_numbers = fonts.define.specify.context_numbers
-local context_merged = fonts.define.specify.context_merged
+local specifiers = fonts.definers.specifiers
+local contextsetups = specifiers.contextsetups
+local contextnumbers = specifiers.contextnumbers
+local contextmerged = specifiers.contextmerged
-- we cannot optimize with "start = first_character(head)" because then we don't
-- know which rlmode we're in which messes up cursive handling later on
@@ -8347,7 +8407,7 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -8362,7 +8422,7 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- fonts.register_message(currentfont,basechar,"no base anchors")
+ fonts.registermessage(currentfont,basechar,"no base anchors")
end
elseif trace_bugs then
logwarning("%s: prev node is no char",pref(kind,lookupname))
@@ -8415,7 +8475,7 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
if ma then
ba = ba[index]
if ba then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma,index)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma,index)
if trace_marks then
logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -8432,7 +8492,7 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- fonts.register_message(currentfont,basechar,"no base anchors")
+ fonts.registermessage(currentfont,basechar,"no base anchors")
end
elseif trace_bugs then
logwarning("%s: prev node is no char",pref(kind,lookupname))
@@ -8462,7 +8522,7 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -8478,7 +8538,7 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- fonts.register_message(currentfont,basechar,"no base anchors")
+ fonts.registermessage(currentfont,basechar,"no base anchors")
end
elseif trace_bugs then
logwarning("%s: prev node is no mark",pref(kind,lookupname))
@@ -8520,7 +8580,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to
if al[anchor] then
local exit = exitanchors[anchor]
if exit then
- local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
if trace_cursive then
logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
end
@@ -8533,7 +8593,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- fonts.register_message(currentfont,startchar,"no entry anchors")
+ fonts.registermessage(currentfont,startchar,"no entry anchors")
end
break
end
@@ -8550,7 +8610,7 @@ end
function handlers.gpos_single(start,kind,lookupname,kerns,sequence)
local startchar = start.char
- local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ local dx, dy, w, h = setpair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
end
@@ -8581,14 +8641,14 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
local a, b = krn[3], krn[4]
if a and #a > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
@@ -8597,7 +8657,7 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
local a, b = krn[3], krn[7]
if a and a ~= 0 then
- local k = set_kern(snext,factor,rlmode,a)
+ local k = setkern(snext,factor,rlmode,a)
if trace_kerns then
logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
end
@@ -8608,7 +8668,7 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
end
done = true
elseif krn ~= 0 then
- local k = set_kern(snext,factor,rlmode,krn)
+ local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
end
@@ -8979,7 +9039,7 @@ function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,cach
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -9052,7 +9112,7 @@ function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext,
if ma then
ba = ba[index]
if ba then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma,index)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma,index)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -9104,7 +9164,7 @@ function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,cach
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -9170,7 +9230,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache,
if al[anchor] then
local exit = exitanchors[anchor]
if exit then
- local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
if trace_cursive then
logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
end
@@ -9183,7 +9243,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache,
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- fonts.register_message(currentfont,startchar,"no entry anchors")
+ fonts.registermessage(currentfont,startchar,"no entry anchors")
end
break
end
@@ -9209,7 +9269,7 @@ function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,cache,c
if kerns then
kerns = kerns[startchar]
if kerns then
- local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ local dx, dy, w, h = setpair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
end
@@ -9247,14 +9307,14 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur
local a, b = krn[3], krn[4]
if a and #a > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
@@ -9263,7 +9323,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur
report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
local a, b = krn[3], krn[7]
if a and a ~= 0 then
- local k = set_kern(snext,factor,rlmode,a)
+ local k = setkern(snext,factor,rlmode,a)
if trace_kerns then
logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
end
@@ -9274,7 +9334,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur
end
done = true
elseif krn ~= 0 then
- local k = set_kern(snext,factor,rlmode,krn)
+ local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
end
@@ -9678,8 +9738,8 @@ function fonts.methods.node.otf.features(head,font,attr)
local script, language, s_enabled, a_enabled, dyn
local attribute_driven = attr and attr ~= 0
if attribute_driven then
- local features = context_setups[context_numbers[attr]] -- could be a direct list
- dyn = context_merged[attr] or 0
+ local features = contextsetups[contextnumbers[attr]] -- could be a direct list
+ dyn = contextmerged[attr] or 0
language, script = features.language or "dflt", features.script or "dflt"
a_enabled = features -- shared.features -- can be made local to the resolver
if dyn == 2 or dyn == -2 then
@@ -10505,23 +10565,18 @@ local traverse_node_list = node.traverse
local fontdata = fonts.ids
local state = attributes.private('state')
-local fcs = (fonts.color and fonts.color.set) or function() end
-local fcr = (fonts.color and fonts.color.reset) or function() end
-
-local a_to_script = otf.a_to_script
-local a_to_language = otf.a_to_language
+local fontcolors = fonts.colors
+local fcs = (fontscolors and fontscolors.set) or function() end
+local fcr = (fontscolors and fontscolors.reset) or function() end
-- in the future we will use language/script attributes instead of the
-- font related value, but then we also need dynamic features which is
-- somewhat slower; and .. we need a chain of them
+local scriptandlanguage = otf.scriptandlanguage
+
function fonts.initializers.node.otf.analyze(tfmdata,value,attr)
- local script, language
- if attr and attr > 0 then
- script, language = a_to_script[attr], a_to_language[attr]
- else
- script, language = tfmdata.script, tfmdata.language
- end
+ local script, language = otf.scriptandlanguage(tfmdata,attr)
local action = initializers[script]
if action then
if type(action) == "function" then
@@ -10538,12 +10593,7 @@ end
function fonts.methods.node.otf.analyze(head,font,attr)
local tfmdata = fontdata[font]
- local script, language
- if attr and attr > 0 then
- script, language = a_to_script[attr], a_to_language[attr]
- else
- script, language = tfmdata.script, tfmdata.language
- end
+ local script, language = otf.scriptandlanguage(tfmdata,attr)
local action = methods[script]
if action then
if type(action) == "function" then
@@ -10996,6 +11046,8 @@ local format, concat, gmatch, match, find, lower = string.format, table.concat,
local tostring, next = tostring, next
local lpegmatch = lpeg.match
+local allocate = utilities.storage.allocate
+
local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end)
local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end)
@@ -11010,33 +11062,38 @@ local report_afm = logs.new("load afm")
default loader that only handles <l n='tfm'/>.</p>
--ldx]]--
-local fonts = fonts
-local tfm = fonts.tfm
-local vf = fonts.vf
-local fontids = fonts.ids
+local fonts = fonts
+local tfm = fonts.tfm
+local vf = fonts.vf
+local fontids = fonts.ids
+
+fonts.used = allocate()
-fonts.used = fonts.used or { }
+tfm.readers = tfm.readers or { }
+tfm.fonts = allocate()
+tfm.internalized = allocate() -- internal tex numbers
-tfm.readers = tfm.readers or { }
-tfm.fonts = tfm.fonts or { }
-tfm.internalized = tfm.internalized or { } -- internal tex numbers
+local readers = tfm.readers
+local sequence = allocate { 'otf', 'ttf', 'afm', 'tfm' }
+readers.sequence = sequence
-local readers = tfm.readers
-local sequence = { 'otf', 'ttf', 'afm', 'tfm' }
-readers.sequence = sequence
+tfm.version = 1.01
+tfm.cache = containers.define("fonts", "tfm", tfm.version, false) -- better in font-tfm
+tfm.autoprefixedafm = true -- this will become false some day (catches texnansi-blabla.*)
-tfm.version = 1.01
-tfm.cache = containers.define("fonts", "tfm", tfm.version, false) -- better in font-tfm
-tfm.auto_afm = true
+fonts.definers = fonts.definers or { }
+local definers = fonts.definers
-fonts.define = fonts.define or { }
-local define = fonts.define
+definers.specifiers = definers.specifiers or { }
+local specifiers = definers.specifiers
-define.method = "afm or tfm" -- afm, tfm, afm or tfm, tfm or afm
-define.specify = define.specify or { }
-define.methods = define.methods or { }
+specifiers.variants = allocate()
+local variants = specifiers.variants
-local findbinfile = resolvers.findbinfile
+definers.method = "afm or tfm" -- afm, tfm, afm or tfm, tfm or afm
+definers.methods = definers.methods or { }
+
+local findbinfile = resolvers.findbinfile
--[[ldx--
<p>We hardly gain anything when we cache the final (pre scaled)
@@ -11065,7 +11122,7 @@ and prepares a table that will move along as we proceed.</p>
-- name name(sub) name(sub)*spec name*spec
-- name@spec*oeps
-local splitter, specifiers = nil, ""
+local splitter, splitspecifiers = nil, ""
local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc
@@ -11074,13 +11131,13 @@ local right = P(")")
local colon = P(":")
local space = P(" ")
-define.defaultlookup = "file"
+definers.defaultlookup = "file"
local prefixpattern = P(false)
-function define.add_specifier(symbol)
- specifiers = specifiers .. symbol
- local method = S(specifiers)
+local function addspecifier(symbol)
+ splitspecifiers = splitspecifiers .. symbol
+ local method = S(splitspecifiers)
local lookup = C(prefixpattern) * colon
local sub = left * C(P(1-left-right-method)^1) * right
local specification = C(method) * C(P(1)^1)
@@ -11088,24 +11145,28 @@ function define.add_specifier(symbol)
splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc("")))
end
-function define.add_lookup(str,default)
+local function addlookup(str,default)
prefixpattern = prefixpattern + P(str)
end
-define.add_lookup("file")
-define.add_lookup("name")
-define.add_lookup("spec")
+definers.addlookup = addlookup
+
+addlookup("file")
+addlookup("name")
+addlookup("spec")
-function define.get_specification(str)
+local function getspecification(str)
return lpegmatch(splitter,str)
end
-function define.register_split(symbol,action)
- define.add_specifier(symbol)
- define.specify[symbol] = action
+definers.getspecification = getspecification
+
+function definers.registersplit(symbol,action)
+ addspecifier(symbol)
+ variants[symbol] = action
end
-function define.makespecification(specification, lookup, name, sub, method, detail, size)
+function definers.makespecification(specification, lookup, name, sub, method, detail, size)
size = size or 655360
if trace_defining then
report_define("%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
@@ -11113,7 +11174,7 @@ function define.makespecification(specification, lookup, name, sub, method, deta
(sub ~= "" and sub) or "-", (method ~= "" and method) or "-", (detail ~= "" and detail) or "-")
end
if not lookup or lookup == "" then
- lookup = define.defaultlookup
+ lookup = definers.defaultlookup
end
local t = {
lookup = lookup, -- forced type
@@ -11130,10 +11191,10 @@ function define.makespecification(specification, lookup, name, sub, method, deta
return t
end
-function define.analyze(specification, size)
+function definers.analyze(specification, size)
-- can be optimized with locals
- local lookup, name, sub, method, detail = define.get_specification(specification or "")
- return define.makespecification(specification, lookup, name, sub, method, detail, size)
+ local lookup, name, sub, method, detail = getspecification(specification or "")
+ return definers.makespecification(specification, lookup, name, sub, method, detail, size)
end
--[[ldx--
@@ -11142,7 +11203,7 @@ end
local sortedhashkeys = table.sortedhashkeys
-function tfm.hash_features(specification)
+function tfm.hashfeatures(specification)
local features = specification.features
if features then
local t = { }
@@ -11174,7 +11235,7 @@ function tfm.hash_features(specification)
return "unknown"
end
-fonts.designsizes = { }
+fonts.designsizes = allocate()
--[[ldx--
<p>In principle we can share tfm tables when we are in node for a font, but then
@@ -11184,10 +11245,10 @@ when we get rid of base mode we can optimize even further by sharing, but then w
loose our testcases for <l n='luatex'/>.</p>
--ldx]]--
-function tfm.hash_instance(specification,force)
+function tfm.hashinstance(specification,force)
local hash, size, fallbacks = specification.hash, specification.size, specification.fallbacks
if force or not hash then
- hash = tfm.hash_features(specification)
+ hash = tfm.hashfeatures(specification)
specification.hash = hash
end
if size < 1000 and fonts.designsizes[hash] then
@@ -11215,8 +11276,8 @@ end
<p>We can resolve the filename using the next function:</p>
--ldx]]--
-define.resolvers = define.resolvers or { }
-local resolvers = define.resolvers
+definers.resolvers = definers.resolvers or { }
+local resolvers = definers.resolvers
-- todo: reporter
@@ -11260,7 +11321,7 @@ function resolvers.spec(specification)
end
end
-function define.resolve(specification)
+function definers.resolve(specification)
if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
local r = resolvers[specification.lookup]
if r then
@@ -11281,7 +11342,7 @@ function define.resolve(specification)
end
end
--
- specification.hash = lower(specification.name .. ' @ ' .. tfm.hash_features(specification))
+ specification.hash = lower(specification.name .. ' @ ' .. tfm.hashfeatures(specification))
if specification.sub and specification.sub ~= "" then
specification.hash = specification.sub .. ' @ ' .. specification.hash
end
@@ -11305,7 +11366,7 @@ specification yet.</p>
--ldx]]--
function tfm.read(specification)
- local hash = tfm.hash_instance(specification)
+ local hash = tfm.hashinstance(specification)
local tfmtable = tfm.fonts[hash] -- hashes by size !
if not tfmtable then
local forced = specification.forced or ""
@@ -11353,22 +11414,22 @@ end
<p>For virtual fonts we need a slightly different approach:</p>
--ldx]]--
-function tfm.read_and_define(name,size) -- no id
- local specification = define.analyze(name,size)
+function tfm.readanddefine(name,size) -- no id
+ local specification = definers.analyze(name,size)
local method = specification.method
- if method and define.specify[method] then
- specification = define.specify[method](specification)
+ if method and variants[method] then
+ specification = variants[method](specification)
end
- specification = define.resolve(specification)
- local hash = tfm.hash_instance(specification)
- local id = define.registered(hash)
+ specification = definers.resolve(specification)
+ local hash = tfm.hashinstance(specification)
+ local id = definers.registered(hash)
if not id then
local fontdata = tfm.read(specification)
if fontdata then
fontdata.hash = hash
id = font.define(fontdata)
- define.register(fontdata,id)
- tfm.cleanup_table(fontdata)
+ definers.register(fontdata,id)
+ tfm.cleanuptable(fontdata)
else
id = 0 -- signal
end
@@ -11388,6 +11449,9 @@ local function check_tfm(specification,fullname)
if foundname == "" then
foundname = findbinfile(fullname, 'ofm') or "" -- bonus for usage outside context
end
+ if foundname == "" then
+ foundname = fonts.names.getfilename(fullname,"tfm")
+ end
if foundname ~= "" then
specification.filename, specification.format = foundname, "ofm"
return tfm.read_from_tfm(specification)
@@ -11396,13 +11460,15 @@ end
local function check_afm(specification,fullname)
local foundname = findbinfile(fullname, 'afm') or "" -- just to be sure
- if foundname == "" and tfm.auto_afm then
+ if foundname == "" then
+ foundname = fonts.names.getfilename(fullname,"afm")
+ end
+ if foundname == "" and tfm.autoprefixedafm then
local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.*
if encoding and shortname and fonts.enc.known[encoding] then
shortname = findbinfile(shortname,'afm') or "" -- just to be sure
if shortname ~= "" then
foundname = shortname
- -- tfm.set_normal_feature(specification,'encoding',encoding) -- will go away
if trace_loading then
report_afm("stripping encoding prefix from filename %s",afmname)
end
@@ -11439,7 +11505,7 @@ function readers.afm(specification,method)
tfmtable = check_afm(specification,specification.name .. "." .. forced)
end
if not tfmtable then
- method = method or define.method or "afm or tfm"
+ method = method or definers.method or "afm or tfm"
if method == "tfm" then
tfmtable = check_tfm(specification,specification.name)
elseif method == "afm" then
@@ -11464,21 +11530,26 @@ local function check_otf(forced,specification,suffix,what)
name = file.addsuffix(name,suffix,true)
end
local fullname, tfmtable = findbinfile(name,suffix) or "", nil -- one shot
+ -- if false then -- can be enabled again when needed
+ -- if fullname == "" then
+ -- local fb = fonts.names.old_to_new[name]
+ -- if fb then
+ -- fullname = findbinfile(fb,suffix) or ""
+ -- end
+ -- end
+ -- if fullname == "" then
+ -- local fb = fonts.names.new_to_old[name]
+ -- if fb then
+ -- fullname = findbinfile(fb,suffix) or ""
+ -- end
+ -- end
+ -- end
if fullname == "" then
- local fb = fonts.names.old_to_new[name]
- if fb then
- fullname = findbinfile(fb,suffix) or ""
- end
- end
- if fullname == "" then
- local fb = fonts.names.new_to_old[name]
- if fb then
- fullname = findbinfile(fb,suffix) or ""
- end
+ fullname = fonts.names.getfilename(name,suffix)
end
if fullname ~= "" then
specification.filename, specification.format = fullname, what -- hm, so we do set the filename, then
- tfmtable = tfm.read_from_open_type(specification) -- we need to do it for all matches / todo
+ tfmtable = tfm.read_from_otf(specification) -- we need to do it for all matches / todo
end
return tfmtable
end
@@ -11504,7 +11575,7 @@ function readers.dfont(specification) return readers.opentype(specification,"ttf
a helper function.</p>
--ldx]]--
-function define.check(features,defaults) -- nb adapts features !
+function definers.check(features,defaults) -- nb adapts features !
local done = false
if features and next(features) then
for k,v in next, defaults do
@@ -11519,7 +11590,7 @@ function define.check(features,defaults) -- nb adapts features !
end
--[[ldx--
-<p>So far the specifyers. Now comes the real definer. Here we cache
+<p>So far the specifiers. Now comes the real definer. Here we cache
based on id's. Here we also intercept the virtual font handler. Since
it evolved stepwise I may rewrite this bit (combine code).</p>
@@ -11530,9 +11601,13 @@ not gain much. By the way, passing id's back to in the callback was
introduced later in the development.</p>
--ldx]]--
-define.last = nil
+local lastdefined = nil -- we don't want this one to end up in s-tra-02
-function define.register(fontdata,id)
+function definers.current() -- or maybe current
+ return lastdefined
+end
+
+function definers.register(fontdata,id)
if fontdata and id then
local hash = fontdata.hash
if not tfm.internalized[hash] then
@@ -11548,7 +11623,7 @@ function define.register(fontdata,id)
end
end
-function define.registered(hash)
+function definers.registered(hash)
local id = tfm.internalized[hash]
return id, id and fonts.ids[id]
end
@@ -11563,7 +11638,7 @@ function tfm.make(specification)
-- however, when virtual tricks are used as feature (makes more
-- sense) we scale the commands in fonts.tfm.scale (and set the
-- factor there)
- local fvm = define.methods[specification.features.vtf.preset]
+ local fvm = definers.methods.variants[specification.features.vtf.preset]
if fvm then
return fvm(specification)
else
@@ -11571,28 +11646,28 @@ function tfm.make(specification)
end
end
-function define.read(specification,size,id) -- id can be optional, name can already be table
+function definers.read(specification,size,id) -- id can be optional, name can already be table
statistics.starttiming(fonts)
if type(specification) == "string" then
- specification = define.analyze(specification,size)
+ specification = definers.analyze(specification,size)
end
local method = specification.method
- if method and define.specify[method] then
- specification = define.specify[method](specification)
+ if method and variants[method] then
+ specification = variants[method](specification)
end
- specification = define.resolve(specification)
- local hash = tfm.hash_instance(specification)
+ specification = definers.resolve(specification)
+ local hash = tfm.hashinstance(specification)
if cache_them then
local fontdata = containers.read(fonts.cache,hash) -- for tracing purposes
end
- local fontdata = define.registered(hash) -- id
+ local fontdata = definers.registered(hash) -- id
if not fontdata then
if specification.features.vtf and specification.features.vtf.preset then
fontdata = tfm.make(specification)
else
fontdata = tfm.read(specification)
if fontdata then
- tfm.check_virtual_id(fontdata)
+ tfm.checkvirtualid(fontdata)
end
end
if cache_them then
@@ -11602,11 +11677,11 @@ function define.read(specification,size,id) -- id can be optional, name can alre
fontdata.hash = hash
fontdata.cache = "no"
if id then
- define.register(fontdata,id)
+ definers.register(fontdata,id)
end
end
end
- define.last = fontdata or id -- todo ! ! ! ! !
+ lastdefined = fontdata or id -- todo ! ! ! ! !
if not fontdata then
report_define( "unknown font %s, loading aborted",specification.name)
elseif trace_defining and type(fontdata) == "table" then
@@ -11626,7 +11701,7 @@ end
function vf.find(name)
name = file.removesuffix(file.basename(name))
- if tfm.resolve_vf then
+ if tfm.resolvevirtualtoo then
local format = fonts.logger.format(name)
if format == 'tfm' or format == 'ofm' then
if trace_defining then
@@ -11651,7 +11726,7 @@ end
<p>We overload both the <l n='tfm'/> and <l n='vf'/> readers.</p>
--ldx]]--
-callbacks.register('define_font' , define.read, "definition of fonts (tfmtable preparation)")
+callbacks.register('define_font' , definers.read, "definition of fonts (tfmtable preparation)")
callbacks.register('find_vf_file', vf.find , "locating virtual fonts, insofar needed") -- not that relevant any more
end -- closure
@@ -11692,15 +11767,15 @@ of the specifier.</p>
--ldx]]--
local fonts = fonts
-local define = fonts.define
-local specify = define.specify
+local definers = fonts.definers
+local specifiers = definers.specifiers
local normalize_meanings = fonts.otf.meanings.normalize
local list = { }
-specify.colonized_default_lookup = "file"
+specifiers.colonizedpreference = "file"
-local function issome () list.lookup = specify.colonized_default_lookup end
+local function issome () list.lookup = specifiers.colonizedpreference end
local function isfile () list.lookup = 'file' end
local function isname () list.lookup = 'name' end
local function thename(s) list.name = s end
@@ -11757,9 +11832,7 @@ local function colonized(specification) -- xetex mode
return specification
end
-specify.colonized = colonized
-
-define.register_split(":",colonized)
+definers.registersplit(":",colonized)
end -- closure
@@ -11777,9 +11850,9 @@ fonts = fonts or { }
-- general
-fonts.otf.pack = false -- only makes sense in context
-fonts.tfm.resolve_vf = false -- no sure about this
-fonts.tfm.fontname_mode = "specification" -- somehow latex needs this
+fonts.otf.pack = false -- only makes sense in context
+fonts.tfm.resolvevirtualtoo = false -- context specific (du eto resolver)
+fonts.tfm.fontnamemode = "specification" -- somehow latex needs this (changed name!)
-- readers
@@ -11789,13 +11862,12 @@ fonts.tfm.readers.afm = nil
-- define
-fonts.define = fonts.define or { }
+fonts.definers = fonts.definers or { }
+fonts.definers.specifiers = fonts.definers.specifiers or { }
---~ fonts.define.method = "tfm"
+fonts.definers.specifiers.specifiers.colonizedpreference = "name" -- is "file" in context
-fonts.define.specify.colonized_default_lookup = "name"
-
-function fonts.define.get_specification(str)
+function fonts.definers.getspecification(str)
return "", str, "", ":", str
end
@@ -11830,7 +11902,7 @@ function fonts.names.resolve(name,sub)
if basename and basename ~= "" then
for i=1,#fileformats do
local format = fileformats[i]
- local foundname = resolvers.find_file(basename,format) or ""
+ local foundname = resolvers.findfile(basename,format) or ""
if foundname ~= "" then
data = dofile(foundname)
break
@@ -11977,7 +12049,7 @@ fonts.initializers.node.otf.expansion = fonts.initializers.common.expansion
-- left over
-function fonts.register_message()
+function fonts.registermessage()
end
-- example vectors
@@ -12028,6 +12100,12 @@ fonts.otf.meanings.normalize = fonts.otf.meanings.normalize or function(t)
end
end
+-- needed (different in context)
+
+function otf.scriptandlanguage(tfmdata)
+ return tfmdata.script, tfmdata.language
+end
+
-- bonus
function fonts.otf.nametoslot(name)
diff --git a/tex/generic/context/luatex-fonts.lua b/tex/generic/context/luatex-fonts.lua
index 0d89a60e2..66cc14992 100644
--- a/tex/generic/context/luatex-fonts.lua
+++ b/tex/generic/context/luatex-fonts.lua
@@ -129,7 +129,7 @@ callback.register('ligaturing', false)
callback.register('kerning', false)
callback.register('pre_linebreak_filter', nodes.simple_font_handler)
callback.register('hpack_filter', nodes.simple_font_handler)
-callback.register('define_font' , fonts.define.read)
+callback.register('define_font' , fonts.definers.read)
callback.register('find_vf_file', nil) -- reset to normal
-- We're done.