author     Context Git Mirror Bot <phg42.2a@gmail.com>  2015-07-09 16:15:05 +0200
committer  Context Git Mirror Bot <phg42.2a@gmail.com>  2015-07-09 16:15:05 +0200
commit     d7b51cfffa3d797c9a19d6cf5366150bcf032554 (patch)
tree       bafdf991019b3a94835b4593ca7973cdeab2bcde /tex
parent     dc7195d9c40c947f61bd001635de6e8b2cb99167 (diff)
download   context-d7b51cfffa3d797c9a19d6cf5366150bcf032554.tar.gz
2015-07-09 15:25:00
Diffstat (limited to 'tex')
-rw-r--r--  tex/context/base/char-tex.lua | 5
-rw-r--r--  tex/context/base/cont-new.mkiv | 2
-rw-r--r--  tex/context/base/context-version.pdf | bin 4202 -> 4211 bytes
-rw-r--r--  tex/context/base/context.mkiv | 2
-rw-r--r--  tex/context/base/font-cff.lua | 6
-rw-r--r--  tex/context/base/font-ctx.lua | 22
-rw-r--r--  tex/context/base/font-dsp.lua | 2154
-rw-r--r--  tex/context/base/font-gds.lua | 22
-rw-r--r--  tex/context/base/font-lib.mkvi | 1
-rw-r--r--  tex/context/base/font-mis.lua | 2
-rw-r--r--  tex/context/base/font-osd.lua | 2397
-rw-r--r--  tex/context/base/font-otc.lua | 357
-rw-r--r--  tex/context/base/font-otd.lua | 20
-rw-r--r--  tex/context/base/font-otf.lua | 45
-rw-r--r--  tex/context/base/font-otl.lua | 687
-rw-r--r--  tex/context/base/font-oto.lua | 452
-rw-r--r--  tex/context/base/font-otr.lua | 60
-rw-r--r--  tex/context/base/font-ots.lua | 3104
-rw-r--r--  tex/context/base/font-oup.lua | 1921
-rw-r--r--  tex/context/base/font-syn.lua | 6
-rw-r--r--  tex/context/base/font-tmp.lua | 2
-rw-r--r--  tex/context/base/meta-imp-outlines.mkiv | 3
-rw-r--r--  tex/context/base/mult-de.mkii | 14
-rw-r--r--  tex/context/base/mult-def.lua | 114
-rw-r--r--  tex/context/base/mult-en.mkii | 14
-rw-r--r--  tex/context/base/mult-fr.mkii | 14
-rw-r--r--  tex/context/base/mult-it.mkii | 14
-rw-r--r--  tex/context/base/mult-nl.mkii | 14
-rw-r--r--  tex/context/base/mult-pe.mkii | 14
-rw-r--r--  tex/context/base/mult-ro.mkii | 14
-rw-r--r--  tex/context/base/node-nut.lua | 16
-rw-r--r--  tex/context/base/status-files.pdf | bin 24416 -> 24346 bytes
-rw-r--r--  tex/context/base/status-lua.pdf | bin 254210 -> 254399 bytes
-rw-r--r--  tex/context/base/strc-bkm.mkiv | 2
-rw-r--r--  tex/context/base/typo-mar.mkiv | 48
-rw-r--r--  tex/context/base/util-sto.lua | 21
-rw-r--r--  tex/context/base/x-set-11.mkiv | 10
-rw-r--r--  tex/context/interface/keys-cs.xml | 22
-rw-r--r--  tex/context/interface/keys-de.xml | 22
-rw-r--r--  tex/context/interface/keys-en.xml | 22
-rw-r--r--  tex/context/interface/keys-fr.xml | 22
-rw-r--r--  tex/context/interface/keys-it.xml | 22
-rw-r--r--  tex/context/interface/keys-nl.xml | 22
-rw-r--r--  tex/context/interface/keys-pe.xml | 22
-rw-r--r--  tex/context/interface/keys-ro.xml | 22
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua | 40
46 files changed, 11504 insertions(+), 291 deletions(-)
diff --git a/tex/context/base/char-tex.lua b/tex/context/base/char-tex.lua
index 2093c6d6c..df7433409 100644
--- a/tex/context/base/char-tex.lua
+++ b/tex/context/base/char-tex.lua
@@ -23,7 +23,6 @@ local mark = utilities.storage.mark
local context = context
local commands = commands
-local implement = interfaces.implement
local characters = characters
local texcharacters = { }
@@ -419,6 +418,10 @@ end
-- all kind of initializations
+if not interfaces then return end
+
+local implement = interfaces.implement
+
local tex = tex
local texsetlccode = tex.setlccode
local texsetuccode = tex.setuccode
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 910276a43..5409438c1 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2015.07.07 21:43}
+\newcontextversion{2015.07.09 15:23}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index a1918c9bd..dc5468cab 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 6b47c4a98..769e6b993 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -39,7 +39,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2015.07.07 21:43}
+\edef\contextversion{2015.07.09 15:23}
\edef\contextkind {beta}
%D For those who want to use this:
diff --git a/tex/context/base/font-cff.lua b/tex/context/base/font-cff.lua
index 2f0198ece..d4acf7dc2 100644
--- a/tex/context/base/font-cff.lua
+++ b/tex/context/base/font-cff.lua
@@ -604,7 +604,7 @@ do
end
end
- -- All these indirect calls makt this run slower but it's cleaner this way
+ -- All these indirect calls make this run slower but it's cleaner this way
-- and we cache the result. As we moved the boundingbox code inline we gain
-- some back.
@@ -671,7 +671,6 @@ do
showvalue("width",width)
end
end
- top = top - 1
elseif not width then
width = true
end
@@ -692,7 +691,6 @@ do
showvalue("width",width)
end
end
- top = top - 1
elseif not width then
width = true
end
@@ -712,7 +710,6 @@ do
showvalue("width",width)
end
end
- top = top - 1
elseif not width then
width = true
end
@@ -1311,6 +1308,7 @@ do
width = nominalwidth + width
end
--
+ -- trace_charstrings = index == 3078 -- todo: make tracker
local glyph = glyphs[index] -- can be autodefined in otr
if not glyph then
glyphs[index] = {
diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua
index 81db31652..da8373c98 100644
--- a/tex/context/base/font-ctx.lua
+++ b/tex/context/base/font-ctx.lua
@@ -297,17 +297,19 @@ local privatefeatures = {
}
local function checkedscript(tfmdata,resources,features)
- local latn = false
+ local latn = false
local script = false
- for g, list in next, resources.features do
- for f, scripts in next, list do
- if privatefeatures[f] then
- -- skip
- elseif scripts.dflt then
- script = "dflt"
- break
- elseif scripts.latn then
- latn = true
+ if resources.features then
+ for g, list in next, resources.features do
+ for f, scripts in next, list do
+ if privatefeatures[f] then
+ -- skip
+ elseif scripts.dflt then
+ script = "dflt"
+ break
+ elseif scripts.latn then
+ latn = true
+ end
end
end
end
diff --git a/tex/context/base/font-dsp.lua b/tex/context/base/font-dsp.lua
new file mode 100644
index 000000000..28e4aad6c
--- /dev/null
+++ b/tex/context/base/font-dsp.lua
@@ -0,0 +1,2154 @@
+if not modules then modules = { } end modules ['font-dsp'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- many 0,0 entry/exit
+
+-- This loader went through a few iterations. First I made an ff compatible one so
+-- that we could do some basic checking. Also some verbosity was added (named
+-- glyphs). Eventually all that was dropped for a context friendly format, simply
+-- because keeping the different table models in sync took too much time. I have the
+-- old file somewhere. A positive side effect is that we get an (up to) much smaller
+-- tma/tmc file. In the end the loader will be not much slower than the
+-- c based ff one.
+
+-- Being binary encoded, an opentype font is rather compact. When expanded into a Lua table
+-- quite some memory can be used. This is very noticeable in the ff loader, which for
+-- a good reason uses a verbose format. However, when we use that data we create a couple
+-- of hashes. In the Lua loader we create these hashes directly, which saves quite some
+-- memory.
+--
+-- We convert a font file only once and then cache it. Before creating the cached instance
+-- packing takes place: common tables get shared. After (re)loading and unpacking we then
+-- get a rather efficient internal representation of the font. In the new loader there is a
+-- pitfall. Because we use some common coverage magic we put a bit more information in
+-- the mark and cursive coverage tables than strictly needed: a reference to the coverage
+-- itself. This permits a fast lookup of the second glyph involved. In the marks we
+-- expand the class indicator to a class hash, in the cursive we use a placeholder that gets
+-- a self reference. This means that we cannot pack these subtables unless we add a unique
+-- id per entry (the same one per coverage) and that makes the tables larger. Because only a
+-- few fonts benefit from this, I decided to not do this. Experiments demonstrated that it
+-- only gives a few percent gain (for instance on husayni we can go from 845K to 828K of
+-- bytecode). Better to stay conceptually clean than messily compact.
+
+-- When we can reduce all basic lookups to one step we might save a bit in the processing
+-- so then only chains are multiple.
+
+-- I used to flatten kerns here but that has been moved elsewhere because it pollutes the code
+-- here and can be done fast afterwards. One can even wonder if it makes sense to do it as we
+-- pack anyway. In a similar fashion the unique placeholders in anchors in marks have been
+-- removed because packing doesn't save much there anyway.
+
+-- Although we have a bit more efficient tables in the cached files, the internals are still
+-- pretty similar. And although we have a slightly more direct coverage access the processing
+-- of node lists is not noticeably faster for latin texts, but for arabic we gain some 10%
+-- (and could probably gain a bit more).
+
+local next, type = next, type
+local bittest = bit32.btest
+local rshift = bit32.rshift
+local concat = table.concat
+local lower = string.lower
+local sub = string.sub
+local strip = string.strip
+local tohash = table.tohash
+local reversed = table.reversed
+
+local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters
+local sortedkeys = table.sortedkeys
+local sortedhash = table.sortedhash
+
+local report = logs.reporter("otf reader")
+
+local readers = fonts.handlers.otf.readers
+local streamreader = readers.streamreader
+
+local setposition = streamreader.setposition
+local skipbytes = streamreader.skip
+local skipshort = streamreader.skipshort
+local readushort = streamreader.readcardinal2 -- 16-bit unsigned integer
+local readulong       = streamreader.readcardinal4  -- 32-bit unsigned integer
+local readshort = streamreader.readinteger2 -- 16-bit signed integer
+local readfword = readshort
+local readstring = streamreader.readstring
+local readtag = streamreader.readtag
+
+local gsubhandlers = { }
+local gposhandlers = { }
+
+local lookupidoffset = -1 -- will become 1 when we migrate (only -1 for comparing with old)
+
+local classes = {
+ "base",
+ "ligature",
+ "mark",
+ "component",
+}
+
+local gsubtypes = {
+ "single",
+ "multiple",
+ "alternate",
+ "ligature",
+ "context",
+ "chainedcontext",
+ "extension",
+ "reversechainedcontextsingle",
+}
+
+local gpostypes = {
+ "single",
+ "pair",
+ "cursive",
+ "marktobase",
+ "marktoligature",
+ "marktomark",
+ "context",
+ "chainedcontext",
+ "extension",
+}
+
+local chaindirections = {
+ context = 0,
+ chainedcontext = 1,
+ reversechainedcontextsingle = -1,
+}
+
+-- Traditionally we use these unique names (so that we can flatten the lookup list;
+-- we create subsets at runtime) but I will adapt the old code to newer names.
+
+-- chainsub
+-- reversesub
+
+local lookupnames = {
+ gsub = {
+ single = "gsub_single",
+ multiple = "gsub_multiple",
+ alternate = "gsub_alternate",
+ ligature = "gsub_ligature",
+ context = "gsub_context",
+ chainedcontext = "gsub_contextchain",
+ reversechainedcontextsingle = "gsub_reversecontextchain", -- reversesub
+ },
+ gpos = {
+ single = "gpos_single",
+ pair = "gpos_pair",
+ cursive = "gpos_cursive",
+ marktobase = "gpos_mark2base",
+ marktoligature = "gpos_mark2ligature",
+ marktomark = "gpos_mark2mark",
+ context = "gpos_context",
+ chainedcontext = "gpos_contextchain",
+ }
+}
+
+-- keep this as reference:
+--
+-- local lookupbits = {
+-- [0x0001] = "righttoleft",
+-- [0x0002] = "ignorebaseglyphs",
+-- [0x0004] = "ignoreligatures",
+-- [0x0008] = "ignoremarks",
+-- [0x0010] = "usemarkfilteringset",
+-- [0x00E0] = "reserved",
+-- [0xFF00] = "markattachmenttype",
+-- }
+--
+-- local lookupstate = setmetatableindex(function(t,k)
+-- local v = { }
+-- for kk, vv in next, lookupbits do
+-- if bittest(k,kk) then
+-- v[vv] = true
+-- end
+-- end
+-- t[k] = v
+-- return v
+-- end)
+
+local lookupflags = setmetatableindex(function(t,k)
+ local v = {
+ bittest(k,0x0008) and true or false, -- ignoremarks
+ bittest(k,0x0004) and true or false, -- ignoreligatures
+ bittest(k,0x0002) and true or false, -- ignorebaseglyphs
+ bittest(k,0x0001) and true or false, -- r2l
+ }
+ t[k] = v
+ return v
+end)
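-- Illustration (hypothetical flag word, not taken from a real font): the memoized
-- table above turns a raw lookup flag word into a cached four slot boolean array.
-- With 0x0009 the bits 0x0008 (ignoremarks) and 0x0001 (righttoleft) are set, so:
--
-- local flags = lookupflags[0x0009]
-- -- flags == { true, false, false, true } -- ignoremarks, ignoreligatures, ignorebaseglyphs, r2l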
+
+-- Beware: only use the simple variant if we don't set keys/values (otherwise too many entries). We
+-- could also have a variant that applies a function but there is no real benefit in this.
+
+local function readcoverage(f,offset,simple)
+ setposition(f,offset)
+ local coverageformat = readushort(f)
+ local coverage = { }
+ if coverageformat == 1 then
+ local nofcoverage = readushort(f)
+ if simple then
+ for i=1,nofcoverage do
+ coverage[i] = readushort(f)
+ end
+ else
+ for i=0,nofcoverage-1 do
+ coverage[readushort(f)] = i -- index in record
+ end
+ end
+ elseif coverageformat == 2 then
+ local nofranges = readushort(f)
+ local n = simple and 1 or 0 -- needs checking
+ for i=1,nofranges do
+ local firstindex = readushort(f)
+ local lastindex = readushort(f)
+ local coverindex = readushort(f)
+ if simple then
+ for i=firstindex,lastindex do
+ coverage[n] = i
+ n = n + 1
+ end
+ else
+ for i=firstindex,lastindex do
+ coverage[i] = n
+ n = n + 1
+ end
+ end
+ end
+ else
+ report("unknown coverage format %a ",coverageformat)
+ end
+ return coverage
+end
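-- Illustration (made up glyph indices): the simple variant returns a plain array of
-- covered glyph indices, while the default variant returns a hash from glyph index
-- to its zero based position in the coverage record:
--
-- readcoverage(f,offset,true)  -->  { 34, 35, 60 }
-- readcoverage(f,offset)       -->  { [34] = 0, [35] = 1, [60] = 2 }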
+
+local function readclassdef(f,offset)
+ setposition(f,offset)
+ local classdefformat = readushort(f)
+ local classdef = { }
+ if classdefformat == 1 then
+ local index = readushort(f)
+ local nofclassdef = readushort(f)
+ for i=1,nofclassdef do
+ classdef[index] = readushort(f) + 1
+ index = index + 1
+ end
+ elseif classdefformat == 2 then
+ local nofranges = readushort(f)
+ local n = 0
+ for i=1,nofranges do
+ local firstindex = readushort(f)
+ local lastindex = readushort(f)
+ local class = readushort(f) + 1
+ for i=firstindex,lastindex do
+ classdef[i] = class
+ end
+ end
+ else
+ report("unknown classdef format %a ",classdefformat)
+ end
+ return classdef
+end
+
+local function classtocoverage(defs)
+ if defs then
+ local list = { }
+ for index, class in next, defs do
+ local c = list[class]
+ if c then
+ c[#c+1] = index
+ else
+ list[class] = { index }
+ end
+ end
+ return list
+ end
+end
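-- Illustration (made up classdef): classtocoverage inverts a classdef hash and groups
-- glyph indices per class; the order within a class depends on hash traversal:
--
-- classtocoverage { [65] = 1, [66] = 1, [67] = 2 }
-- --> { [1] = { 65, 66 }, [2] = { 67 } }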
+
+-- extra readers
+
+local function readposition(f,format)
+ if format == 0 then
+ return nil
+ end
+ -- maybe fast test on 0x0001 + 0x0002 + 0x0004 + 0x0008 (profile first)
+ local x = bittest(format,0x0001) and readshort(f) or 0 -- placement
+ local y = bittest(format,0x0002) and readshort(f) or 0 -- placement
+ local h = bittest(format,0x0004) and readshort(f) or 0 -- advance
+ local v = bittest(format,0x0008) and readshort(f) or 0 -- advance
+ if x == 0 and y == 0 and h == 0 and v == 0 then
+ return nil
+ else
+ return { x, y, h, v }
+ end
+end
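-- Illustration (made up values): the format word flags which of the four fields are
-- stored; missing ones become zero and an all zero record collapses to nil. A format
-- of 0x0005 (x placement plus horizontal advance) reads two shorts:
--
-- readposition(f,0x0005)  -->  { -30, 0, 60, 0 } -- x, y, h, v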
+
+local function readanchor(f,offset)
+ if not offset or offset == 0 then
+ return nil -- false
+ end
+ setposition(f,offset)
+ local format = readshort(f)
+ if format == 0 then
+ report("invalid anchor format %i @ position %i",format,offset)
+ return false
+ elseif format > 3 then
+ report("unsupported anchor format %i @ position %i",format,offset)
+ return false
+ end
+ return { readshort(f), readshort(f) }
+end
+
+-- common handlers: inlining can be faster but we cache anyway
+-- so we don't bother too much about speed here
+
+local function readfirst(f,offset)
+ if offset then
+ setposition(f,offset)
+ end
+ return { readushort(f) }
+end
+
+local function readarray(f,offset,first)
+ if offset then
+ setposition(f,offset)
+ end
+ local n = readushort(f)
+ if first then
+ local t = { first }
+ for i=2,n do
+ t[i] = readushort(f)
+ end
+ return t, n
+ elseif n > 0 then
+ local t = { }
+ for i=1,n do
+ t[i] = readushort(f)
+ end
+ return t, n
+ end
+end
+
+local function readcoveragearray(f,offset,t,simple)
+ if not t then
+ return nil
+ end
+ local n = #t
+ if n == 0 then
+ return nil
+ end
+ for i=1,n do
+ t[i] = readcoverage(f,offset+t[i],simple)
+ end
+ return t
+end
+
+local function covered(subset,all)
+ local used, u
+ for i=1,#subset do
+ local s = subset[i]
+ if all[s] then
+ if used then
+ u = u + 1
+ used[u] = s
+ else
+ u = 1
+ used = { s }
+ end
+ end
+ end
+ return used
+end
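-- Illustration (made up data): covered keeps only those glyphs of a class list that
-- also occur in the given coverage hash:
--
-- covered({ 65, 66, 67 }, { [65] = 0, [67] = 2 })  -->  { 65, 67 }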
+
+-- We generalize the chained lookups so that we can do with only one handler
+-- when processing them.
+
+local function unchainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,what)
+ local tableoffset = lookupoffset + offset
+ setposition(f,tableoffset)
+ local subtype = readushort(f)
+ if subtype == 1 then
+ local coverage = readushort(f)
+ local subclasssets = readarray(f)
+ coverage = readcoverage(f,tableoffset+coverage,true)
+ local rules = { }
+ for i=1,#subclasssets do
+ local offset = subclasssets[i]
+ if offset > 0 then
+ local firstcoverage = coverage[i]
+ local rulesoffset = tableoffset + offset
+ local subclassrules = readarray(f,rulesoffset)
+ for rule=1,#subclassrules do
+ setposition(f,rulesoffset + subclassrules[rule])
+ local nofcurrent = readushort(f)
+ local noflookups = readushort(f)
+ local current = { { firstcoverage } }
+ for i=2,nofcurrent do
+ current[i] = { readushort(f) }
+ end
+ local lookups = { }
+ for i=1,noflookups do
+ lookups[readushort(f)+1] = readushort(f) + 1
+ end
+ rules[#rules+1] = {
+ current = current,
+ lookups = lookups
+ }
+ end
+ end
+ end
+ return {
+ format = "glyphs",
+ rules = rules,
+ }
+ elseif subtype == 2 then
+        -- We expand the classes because later on we pack the whole table anyway, so we
+        -- get the efficiency back. This way we can also apply the coverage to the first current.
+ local coverage = readushort(f)
+ local currentclassdef = readushort(f)
+ local subclasssets = readarray(f)
+ coverage = readcoverage(f,tableoffset + coverage)
+ currentclassdef = readclassdef(f,tableoffset + currentclassdef)
+ local currentclasses = classtocoverage(currentclassdef,fontdata.glyphs)
+ local rules = { }
+ for class=1,#subclasssets do
+ local offset = subclasssets[class]
+ if offset > 0 then
+ local firstcoverage = currentclasses[class]
+ if firstcoverage then
+ firstcoverage = covered(firstcoverage,coverage) -- bonus
+ if firstcoverage then
+ local rulesoffset = tableoffset + offset
+ local subclassrules = readarray(f,rulesoffset)
+ for rule=1,#subclassrules do
+ setposition(f,rulesoffset + subclassrules[rule])
+ local nofcurrent = readushort(f)
+ local noflookups = readushort(f)
+ local current = { firstcoverage }
+ for i=2,nofcurrent do
+ current[i] = currentclasses[readushort(f) + 1]
+ end
+ local lookups = { }
+ for i=1,noflookups do
+ lookups[readushort(f)+1] = readushort(f) + 1
+ end
+ rules[#rules+1] = {
+ current = current,
+ lookups = lookups
+ }
+ end
+ else
+ report("no coverage")
+ end
+ else
+ report("no coverage class")
+ end
+ end
+ end
+ return {
+ format = "class",
+ rules = rules,
+ }
+ elseif subtype == 3 then
+ local current = readarray(f)
+ local noflookups = readushort(f)
+ local lookups = { }
+ for i=1,noflookups do
+ lookups[readushort(f)+1] = readushort(f) + 1
+ end
+ current = readcoveragearray(f,tableoffset,current,true)
+ return {
+ format = "coverage",
+ rules = {
+ {
+ current = current,
+ lookups = lookups,
+ }
+ }
+ }
+ else
+ report("unsupported subtype %a in %a %s",subtype,"chainedcontext",what)
+ end
+end
+
+-- todo: optimize for n=1 ?
+
+-- class index needs checking, probably no need for +1
+
+local function chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,what)
+ local tableoffset = lookupoffset + offset
+ setposition(f,tableoffset)
+ local subtype = readushort(f)
+ if subtype == 1 then
+ local coverage = readushort(f)
+ local subclasssets = readarray(f)
+ coverage = readcoverage(f,tableoffset+coverage,true)
+ local rules = { }
+ for i=1,#subclasssets do
+ local offset = subclasssets[i]
+ if offset > 0 then
+ local firstcoverage = coverage[i]
+ local rulesoffset = tableoffset + offset
+ local subclassrules = readarray(f,rulesoffset)
+ for rule=1,#subclassrules do
+ setposition(f,rulesoffset + subclassrules[rule])
+ local nofbefore = readushort(f)
+ local before
+ if nofbefore > 0 then
+ before = { }
+ for i=1,nofbefore do
+ before[i] = { readushort(f) }
+ end
+ end
+ local nofcurrent = readushort(f)
+ local current = { { firstcoverage } }
+ for i=2,nofcurrent do
+ current[i] = { readushort(f) }
+ end
+ local nofafter = readushort(f)
+ local after
+ if nofafter > 0 then
+ after = { }
+ for i=1,nofafter do
+ after[i] = { readushort(f) }
+ end
+ end
+ local noflookups = readushort(f)
+ local lookups = { }
+ for i=1,noflookups do
+ lookups[readushort(f)+1] = readushort(f) + 1
+ end
+ rules[#rules+1] = {
+ before = before,
+ current = current,
+ after = after,
+ lookups = lookups,
+ }
+ end
+ end
+ end
+ return {
+ format = "glyphs",
+ rules = rules,
+ }
+ elseif subtype == 2 then
+ local coverage = readushort(f)
+ local beforeclassdef = readushort(f)
+ local currentclassdef = readushort(f)
+ local afterclassdef = readushort(f)
+ local subclasssets = readarray(f)
+ local coverage = readcoverage(f,tableoffset + coverage)
+ local beforeclassdef = readclassdef(f,tableoffset + beforeclassdef)
+ local currentclassdef = readclassdef(f,tableoffset + currentclassdef)
+ local afterclassdef = readclassdef(f,tableoffset + afterclassdef)
+ local beforeclasses = classtocoverage(beforeclassdef,fontdata.glyphs)
+ local currentclasses = classtocoverage(currentclassdef,fontdata.glyphs)
+ local afterclasses = classtocoverage(afterclassdef,fontdata.glyphs)
+ local rules = { }
+ for class=1,#subclasssets do
+ local offset = subclasssets[class]
+ if offset > 0 then
+ local firstcoverage = currentclasses[class]
+ if firstcoverage then
+ firstcoverage = covered(firstcoverage,coverage) -- bonus
+ if firstcoverage then
+ local rulesoffset = tableoffset + offset
+ local subclassrules = readarray(f,rulesoffset)
+ for rule=1,#subclassrules do
+ -- watch out, in context we first get the counts and then the arrays while
+ -- here we get them mixed
+ setposition(f,rulesoffset + subclassrules[rule])
+ local nofbefore = readushort(f)
+ local before
+ if nofbefore > 0 then
+ before = { }
+ for i=1,nofbefore do
+ before[i] = beforeclasses[readushort(f) + 1]
+ end
+ end
+ local nofcurrent = readushort(f)
+ local current = { firstcoverage }
+ for i=2,nofcurrent do
+ current[i] = currentclasses[readushort(f)+ 1]
+ end
+ local nofafter = readushort(f)
+ local after
+ if nofafter > 0 then
+ after = { }
+ for i=1,nofafter do
+ after[i] = afterclasses[readushort(f) + 1]
+ end
+ end
+ -- no sequence index here (so why in context as it saves nothing)
+ local noflookups = readushort(f)
+ local lookups = { }
+ for i=1,noflookups do
+ lookups[readushort(f)+1] = readushort(f) + 1
+ end
+ rules[#rules+1] = {
+ before = before,
+ current = current,
+ after = after,
+ lookups = lookups,
+ }
+ end
+ else
+ report("no coverage")
+ end
+ else
+ report("class is not covered")
+ end
+ end
+ end
+ return {
+ format = "class",
+ rules = rules,
+ }
+ elseif subtype == 3 then
+ local before = readarray(f)
+ local current = readarray(f)
+ local after = readarray(f)
+ local noflookups = readushort(f)
+ local lookups = { }
+ for i=1,noflookups do
+ lookups[readushort(f)+1] = readushort(f) + 1
+ end
+ before = readcoveragearray(f,tableoffset,before,true)
+ current = readcoveragearray(f,tableoffset,current,true)
+ after = readcoveragearray(f,tableoffset,after,true)
+ return {
+ format = "coverage",
+ rules = {
+ {
+ before = before,
+ current = current,
+ after = after,
+ lookups = lookups,
+ }
+ }
+ }
+ else
+ report("unsupported subtype %a in %a %s",subtype,"chainedcontext",what)
+ end
+end
+
+local function extension(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,types,handlers,what)
+ local tableoffset = lookupoffset + offset
+ setposition(f,tableoffset)
+ local subtype = readushort(f)
+ if subtype == 1 then
+ local lookuptype = types[readushort(f)]
+ local faroffset = readulong(f)
+ local handler = handlers[lookuptype]
+ if handler then
+ -- maybe we can just pass one offset (or tableoffset first)
+ return handler(f,fontdata,lookupid,tableoffset + faroffset,0,glyphs,nofglyphs), lookuptype
+ else
+ report("no handler for lookuptype %a subtype %a in %s %s",lookuptype,subtype,what,"extension")
+ end
+ else
+ report("unsupported subtype %a in %s %s",subtype,what,"extension")
+ end
+end
+
+-- gsub handlers
+
+function gsubhandlers.single(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ local tableoffset = lookupoffset + offset
+ setposition(f,tableoffset)
+ local subtype = readushort(f)
+ if subtype == 1 then
+ local coverage = readushort(f)
+ local delta = readshort(f) -- can be negative
+ local coverage = readcoverage(f,tableoffset+coverage) -- not simple as we need to set key/value anyway
+ for index in next, coverage do
+ local newindex = index + delta
+ if index > nofglyphs or newindex > nofglyphs then
+ report("invalid index in %s format %i: %i -> %i (max %i)","single",subtype,index,newindex,nofglyphs)
+ coverage[index] = nil
+ else
+ coverage[index] = newindex
+ end
+ end
+ return {
+ coverage = coverage
+ }
+ elseif subtype == 2 then -- in streamreader a seek and fetch is faster than a temp table
+ local coverage = readushort(f)
+ local nofreplacements = readushort(f)
+ local replacements = { }
+ for i=1,nofreplacements do
+ replacements[i] = readushort(f)
+ end
+ local coverage = readcoverage(f,tableoffset + coverage) -- not simple as we need to set key/value anyway
+ for index, newindex in next, coverage do
+ newindex = newindex + 1
+ if index > nofglyphs or newindex > nofglyphs then
+ report("invalid index in %s format %i: %i -> %i (max %i)","single",subtype,index,newindex,nofglyphs)
+ coverage[index] = nil
+ else
+ coverage[index] = replacements[newindex]
+ end
+ end
+ return {
+ coverage = coverage
+ }
+ else
+ report("unsupported subtype %a in %a substitution",subtype,"single")
+ end
+end
+
+local function sethandler(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,what)
+ local tableoffset = lookupoffset + offset
+ setposition(f,tableoffset)
+ local subtype = readushort(f)
+ if subtype == 1 then
+ local coverage = readushort(f)
+ local nofsequence = readushort(f)
+ local sequences = { }
+ for i=1,nofsequence do
+ sequences[i] = readushort(f)
+ end
+ for i=1,nofsequence do
+ setposition(f,tableoffset + sequences[i])
+ local n = readushort(f)
+ local s = { }
+ for i=1,n do
+ s[i] = readushort(f)
+ end
+ sequences[i] = s
+ end
+ local coverage = readcoverage(f,tableoffset + coverage)
+ for index, newindex in next, coverage do
+ newindex = newindex + 1
+ if index > nofglyphs or newindex > nofglyphs then
+ report("invalid index in %s format %i: %i -> %i (max %i)",what,subtype,index,newindex,nofglyphs)
+ coverage[index] = nil
+ else
+ coverage[index] = sequences[newindex]
+ end
+ end
+ return {
+ coverage = coverage
+ }
+ else
+ report("unsupported subtype %a in %a substitution",subtype,what)
+ end
+end
+
+function gsubhandlers.multiple(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return sethandler(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,"multiple")
+end
+
+function gsubhandlers.alternate(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return sethandler(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,"alternate")
+end
+
+function gsubhandlers.ligature(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ local tableoffset = lookupoffset + offset
+ setposition(f,tableoffset)
+ local subtype = readushort(f)
+ if subtype == 1 then
+ local coverage = readushort(f)
+ local nofsets = readushort(f)
+ local ligatures = { }
+ for i=1,nofsets do
+ ligatures[i] = readushort(f)
+ end
+ for i=1,nofsets do
+ local offset = lookupoffset + offset + ligatures[i]
+ setposition(f,offset)
+ local n = readushort(f)
+ local l = { }
+ for i=1,n do
+ l[i] = offset + readushort(f)
+ end
+ ligatures[i] = l
+ end
+ local coverage = readcoverage(f,tableoffset + coverage)
+ for index, newindex in next, coverage do
+ local hash = { }
+ local ligatures = ligatures[newindex+1]
+ for i=1,#ligatures do
+ local offset = ligatures[i]
+ setposition(f,offset)
+ local lig = readushort(f)
+ local cnt = readushort(f)
+ local hsh = hash
+ for i=2,cnt do
+ local c = readushort(f)
+ local h = hsh[c]
+ if not h then
+ h = { }
+ hsh[c] = h
+ end
+ hsh = h
+ end
+ hsh.ligature = lig
+ end
+ coverage[index] = hash
+ end
+ return {
+ coverage = coverage
+ }
+ else
+ report("unsupported subtype %a in %a substitution",subtype,"ligature")
+ end
+end
+
+function gsubhandlers.context(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return unchainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,"substitution"), "context"
+end
+
+function gsubhandlers.chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,"substitution"), "chainedcontext"
+end
+
+function gsubhandlers.extension(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return extension(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,gsubtypes,gsubhandlers,"substitution")
+end
+
+function gsubhandlers.reversechainedcontextsingle(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ local tableoffset = lookupoffset + offset
+ setposition(f,tableoffset)
+ local subtype = readushort(f)
+ if subtype == 1 then -- NEEDS CHECKING
+ local current = readfirst(f)
+ local before = readarray(f)
+ local after = readarray(f)
+ local replacements = readarray(f)
+ current = readcoveragearray(f,tableoffset,current,true)
+ before = readcoveragearray(f,tableoffset,before,true)
+ after = readcoveragearray(f,tableoffset,after,true)
+ return {
+ coverage = {
+ format = "reversecoverage", -- reversesub
+ before = before,
+ current = current,
+ after = after,
+ replacements = replacements,
+ }
+ }, "reversechainedcontextsingle"
+ else
+ report("unsupported subtype %a in %a substitution",subtype,"reversechainedcontextsingle")
+ end
+end
+
+-- gpos handlers
+
+local function readpairsets(f,tableoffset,sets,format1,format2)
+ local done = { }
+ for i=1,#sets do
+ local offset = sets[i]
+ local reused = done[offset]
+ if not reused then
+ setposition(f,tableoffset + offset)
+ local n = readushort(f)
+ reused = { }
+ for i=1,n do
+ reused[i] = {
+ readushort(f), -- second glyph id
+ readposition(f,format1),
+ readposition(f,format2)
+ }
+ end
+ done[offset] = reused
+ end
+ sets[i] = reused
+ end
+ return sets
+end
+
+local function readpairclasssets(f,nofclasses1,nofclasses2,format1,format2)
+ local classlist1 = { }
+ for i=1,nofclasses1 do
+ local classlist2 = { }
+ classlist1[i] = classlist2
+ for j=1,nofclasses2 do
+ local one = readposition(f,format1)
+ local two = readposition(f,format2)
+ if one or two then
+ classlist2[j] = { one, two }
+ else
+ classlist2[j] = false
+ end
+ end
+ end
+ return classlist1
+end
+
+-- no real gain in kerns as we pack
+
+function gposhandlers.single(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ local tableoffset = lookupoffset + offset
+ setposition(f,tableoffset)
+ local subtype = readushort(f)
+ if subtype == 1 then
+ local coverage = readushort(f)
+ local format = readushort(f)
+ local value = readposition(f,format)
+ local coverage = readcoverage(f,tableoffset+coverage)
+ for index, newindex in next, coverage do
+ coverage[index] = value
+ end
+ return {
+ format = "pair",
+ coverage = coverage
+ }
+ elseif subtype == 2 then
+ local coverage = readushort(f)
+ local format = readushort(f)
+ local values = { }
+ local nofvalues = readushort(f)
+ for i=1,nofvalues do
+ values[i] = readposition(f,format)
+ end
+ local coverage = readcoverage(f,tableoffset+coverage)
+ for index, newindex in next, coverage do
+ coverage[index] = values[newindex+1]
+ end
+ return {
+ format = "pair",
+ coverage = coverage
+ }
+ else
+ report("unsupported subtype %a in %a positioning",subtype,"single")
+ end
+end
+
+-- this needs checking! if no second pair then another advance over the list
+
+-- ValueFormat1 applies to the ValueRecord of the first glyph in each pair. ValueRecords for all first glyphs must use ValueFormat1. If ValueFormat1 is set to zero (0), the corresponding glyph has no ValueRecord and, therefore, should not be repositioned.
+-- ValueFormat2 applies to the ValueRecord of the second glyph in each pair. ValueRecords for all second glyphs must use ValueFormat2. If ValueFormat2 is set to null, then the second glyph of the pair is the “next” glyph for which a lookup should be performed.
+
+-- !!!!! this needs checking: when both false, we have no hit so then we might need to fall through
+
+function gposhandlers.pair(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ local tableoffset = lookupoffset + offset
+ setposition(f,tableoffset)
+ local subtype = readushort(f)
+ if subtype == 1 then
+ local coverage = readushort(f)
+ local format1 = readushort(f)
+ local format2 = readushort(f)
+ local sets = readarray(f)
+ sets = readpairsets(f,tableoffset,sets,format1,format2)
+ coverage = readcoverage(f,tableoffset + coverage)
+ for index, newindex in next, coverage do
+ local set = sets[newindex+1]
+ local hash = { }
+ for i=1,#set do
+ local value = set[i]
+ if value then
+ local other = value[1]
+ local first = value[2]
+ local second = value[3]
+ if first or second then
+ hash[other] = { first, second } -- needs checking
+ else
+ hash[other] = nil
+ end
+ end
+ end
+ coverage[index] = hash
+ end
+ return {
+ format = "pair",
+ coverage = coverage
+ }
+ elseif subtype == 2 then
+ local coverage = readushort(f)
+ local format1 = readushort(f)
+ local format2 = readushort(f)
+ local classdef1 = readushort(f)
+ local classdef2 = readushort(f)
+ local nofclasses1 = readushort(f) -- incl class 0
+ local nofclasses2 = readushort(f) -- incl class 0
+ local classlist = readpairclasssets(f,nofclasses1,nofclasses2,format1,format2)
+ coverage = readcoverage(f,tableoffset+coverage,true)
+ classdef1 = readclassdef(f,tableoffset+classdef1)
+ classdef2 = readclassdef(f,tableoffset+classdef2)
+ local usedcoverage = { }
+ for g1, c1 in next, classdef1 do
+ if coverage[g1] then
+ local l1 = classlist[c1]
+ if l1 then
+ local hash = { }
+ for paired, class in next, classdef2 do
+ local offsets = l1[class]
+ if offsets then
+ local first = offsets[1]
+ local second = offsets[2]
+ if first or second then
+ hash[paired] = { first, second }
+ else
+ -- upto the next lookup for this combination
+ end
+ end
+ end
+ usedcoverage[g1] = hash
+ end
+ end
+ end
+ return {
+ format = "pair",
+ coverage = usedcoverage
+ }
+ elseif subtype == 3 then
+ report("yet unsupported subtype %a in %a positioning",subtype,"pair")
+ else
+ report("unsupported subtype %a in %a positioning",subtype,"pair")
+ end
+end
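-- Illustration (shape only, no real values): after reading, a pair subtable ends up
-- as a coverage hash where a first glyph maps to a hash of second glyphs, each entry
-- carrying the two optional value records as returned by readposition:
--
-- coverage[firstglyph][secondglyph] = { firstvalue, secondvalue }
-- -- where firstvalue / secondvalue are { x, y, h, v } tables or nil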
+
+function gposhandlers.cursive(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ local tableoffset = lookupoffset + offset
+ setposition(f,tableoffset)
+ local subtype = readushort(f)
+ if subtype == 1 then
+ local coverage = tableoffset + readushort(f)
+ local nofrecords = readushort(f)
+ local records = { }
+ for i=1,nofrecords do
+ local entry = readushort(f)
+ local exit = readushort(f)
+ records[i] = {
+ entry = entry ~= 0 and (tableoffset + entry) or false,
+ exit = exit ~= 0 and (tableoffset + exit ) or false,
+ }
+ end
+ coverage = readcoverage(f,coverage)
+ for i=1,nofrecords do
+ local r = records[i]
+ records[i] = {
+ 1, -- will become hash after loading (must be unique per lookup when packed)
+ readanchor(f,r.entry) or nil,
+ readanchor(f,r.exit ) or nil,
+ }
+ end
+ for index, newindex in next, coverage do
+ coverage[index] = records[newindex+1]
+ end
+ return {
+ coverage = coverage
+ }
+ else
+ report("unsupported subtype %a in %a positioning",subtype,"cursive")
+ end
+end
+
+local function handlemark(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,ligature)
+ local tableoffset = lookupoffset + offset
+ setposition(f,tableoffset)
+ local subtype = readushort(f)
+ if subtype == 1 then
+ -- we are one based, not zero
+ local markcoverage = tableoffset + readushort(f)
+ local basecoverage = tableoffset + readushort(f)
+ local nofclasses = readushort(f)
+ local markoffset = tableoffset + readushort(f)
+ local baseoffset = tableoffset + readushort(f)
+ --
+ local markcoverage = readcoverage(f,markcoverage)
+ local basecoverage = readcoverage(f,basecoverage,true)
+ --
+ setposition(f,markoffset)
+ local markclasses = { }
+ local nofmarkclasses = readushort(f)
+ --
+ local lastanchor = fontdata.lastanchor or 0
+ local usedanchors = { }
+ --
+-- local placeholder = (fontdata.markcount or 0) + 1
+-- fontdata.markcount = placeholder
+-- placeholder = "m" .. placeholder
+ --
+ for i=1,nofmarkclasses do
+ local class = readushort(f) + 1
+ local offset = readushort(f)
+ if offset == 0 then
+ markclasses[i] = false
+ else
+-- markclasses[i] = { placeholder, class, markoffset + offset }
+ markclasses[i] = { class, markoffset + offset }
+ end
+ usedanchors[class] = true
+ end
+ for i=1,nofmarkclasses do
+ local mc = markclasses[i]
+ if mc then
+-- mc[3] = readanchor(f,mc[3])
+ mc[2] = readanchor(f,mc[2])
+ end
+ end
+ --
+ setposition(f,baseoffset)
+ local nofbaserecords = readushort(f)
+ local baserecords = { }
+ --
+ if ligature then
+ -- 3 components
+ -- 1 : class .. nofclasses -- NULL when empty
+ -- 2 : class .. nofclasses -- NULL when empty
+ -- 3 : class .. nofclasses -- NULL when empty
+ for i=1,nofbaserecords do -- here i is the class
+ local offset = readushort(f)
+ if offset == 0 then
+ baserecords[i] = false
+ else
+ baserecords[i] = baseoffset + offset
+ end
+ end
+ for i=1,nofbaserecords do
+ local recordoffset = baserecords[i]
+ if recordoffset then
+ setposition(f,recordoffset)
+ local nofcomponents = readushort(f)
+ local components = { }
+ for i=1,nofcomponents do
+ local classes = { }
+ for i=1,nofclasses do
+ local offset = readushort(f)
+ if offset ~= 0 then
+ classes[i] = recordoffset + offset
+ else
+ classes[i] = false
+ end
+ end
+ components[i] = classes
+ end
+ baserecords[i] = components
+ end
+ end
+ local baseclasses = { } -- setmetatableindex("table")
+ for i=1,nofclasses do
+ baseclasses[i] = { }
+ end
+ for i=1,nofbaserecords do
+ local components = baserecords[i]
+ local b = basecoverage[i]
+ if components then
+ for c=1,#components do
+ local classes = components[i]
+ if classes then
+ for i=1,nofclasses do
+ local anchor = readanchor(f,classes[i])
+ local bclass = baseclasses[i]
+ local bentry = bclass[b]
+ if bentry then
+ bentry[c] = anchor
+ else
+ bclass[b]= { [c] = anchor }
+ end
+ end
+ end
+ components[i] = classes
+ end
+ end
+ end
+ for index, newindex in next, markcoverage do
+ markcoverage[index] = markclasses[newindex+1] or nil
+ end
+ return {
+ format = "ligature",
+ baseclasses = baseclasses,
+ coverage = markcoverage,
+ }
+ else
+ for i=1,nofbaserecords do
+ local r = { }
+ for j=1,nofclasses do
+ local offset = readushort(f)
+ if offset == 0 then
+ r[j] = false
+ else
+ r[j] = baseoffset + offset
+ end
+ end
+ baserecords[i] = r
+ end
+ local baseclasses = { } -- setmetatableindex("table")
+ for i=1,nofclasses do
+ baseclasses[i] = { }
+ end
+ for i=1,nofbaserecords do
+ local r = baserecords[i]
+ local b = basecoverage[i]
+ for j=1,nofclasses do
+ baseclasses[j][b] = readanchor(f,r[j])
+ end
+ end
+ for index, newindex in next, markcoverage do
+ markcoverage[index] = markclasses[newindex+1] or nil
+ end
+ -- we could actually already calculate the displacement if we want
+ return {
+ format = "base",
+ baseclasses = baseclasses,
+ coverage = markcoverage,
+ }
+ end
+ else
+ report("unsupported subtype %a in",subtype)
+ end
+
+end
+
+function gposhandlers.marktobase(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return handlemark(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+end
+
+function gposhandlers.marktoligature(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return handlemark(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,true)
+end
+
+function gposhandlers.marktomark(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return handlemark(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+end
+
+function gposhandlers.context(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return unchainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,"positioning"), "context"
+end
+
+function gposhandlers.chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,"positioning"), "chainedcontext"
+end
+
+function gposhandlers.extension(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return extension(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,gpostypes,gposhandlers,"positioning")
+end
+
+-- main loader
+
+do
+
+ local plugins = { }
+
+ function plugins.size(f,fontdata,tableoffset,parameters)
+ if not fontdata.designsize then
+ setposition(f,tableoffset+parameters)
+ local designsize = readushort(f)
+ if designsize > 0 then
+ fontdata.designsize = designsize
+ skipshort(f,2)
+ fontdata.minsize = readushort(f)
+ fontdata.maxsize = readushort(f)
+ end
+ end
+ end
+
+ -- feature order needs checking ... as we loop over a hash
+
+ local function reorderfeatures(fontdata,scripts,features)
+ local scriptlangs = { }
+ local featurehash = { }
+ local featureorder = { }
+ for script, languages in next, scripts do
+ for language, record in next, languages do
+ local hash = { }
+ local list = record.featureindices
+ for k=1,#list do
+ local index = list[k]
+ local feature = features[index]
+ local lookups = feature.lookups
+ local tag = feature.tag
+ if tag then
+ hash[tag] = true
+ end
+ if lookups then
+ for i=1,#lookups do
+ local lookup = lookups[i]
+ local o = featureorder[lookup]
+ if o then
+ local okay = true
+ for i=1,#o do
+ if o[i] == tag then
+ okay = false
+ break
+ end
+ end
+ if okay then
+ o[#o+1] = tag
+ end
+ else
+ featureorder[lookup] = { tag }
+ end
+ local f = featurehash[lookup]
+ if f then
+ local h = f[tag]
+ if h then
+ local s = h[script]
+ if s then
+ s[language] = true
+ else
+ h[script] = { [language] = true }
+ end
+ else
+ f[tag] = { [script] = { [language] = true } }
+ end
+ else
+ featurehash[lookup] = { [tag] = { [script] = { [language] = true } } }
+ end
+ --
+ local h = scriptlangs[tag]
+ if h then
+ local s = h[script]
+ if s then
+ s[language] = true
+ else
+ h[script] = { [language] = true }
+ end
+ else
+ scriptlangs[tag] = { [script] = { [language] = true } }
+ end
+ end
+ end
+ end
+ end
+ end
+ return scriptlangs, featurehash, featureorder
+ end
+
+ local function readscriplan(f,fontdata,scriptoffset)
+ setposition(f,scriptoffset)
+ local nofscripts = readushort(f)
+ local scripts = { }
+ for i=1,nofscripts do
+ scripts[readtag(f)] = scriptoffset + readushort(f)
+ end
+ -- script list -> language system info
+ local languagesystems = setmetatableindex("table")
+ for script, offset in next, scripts do
+ setposition(f,offset)
+ local defaultoffset = readushort(f)
+ local noflanguages = readushort(f)
+ local languages = { }
+ if defaultoffset > 0 then
+ languages.dflt = languagesystems[offset + defaultoffset]
+ end
+ for i=1,noflanguages do
+ local language = readtag(f)
+ local offset = offset + readushort(f)
+ languages[language] = languagesystems[offset]
+ end
+ scripts[script] = languages
+ end
+ -- script list -> language system info -> feature list
+ for offset, usedfeatures in next, languagesystems do
+ if offset > 0 then
+ setposition(f,offset)
+ local featureindices = { }
+ usedfeatures.featureindices = featureindices
+ usedfeatures.lookuporder = readushort(f) -- reserved, not used (yet)
+ usedfeatures.requiredindex = readushort(f) -- relates to required (can be 0xFFFF)
+ local noffeatures = readushort(f)
+ for i=1,noffeatures do
+ featureindices[i] = readushort(f) + 1
+ end
+ end
+ end
+ return scripts
+ end
+
+ local function readfeatures(f,fontdata,featureoffset)
+ setposition(f,featureoffset)
+ local features = { }
+ local noffeatures = readushort(f)
+ for i=1,noffeatures do
+ -- also shared?
+ features[i] = {
+ tag = readtag(f),
+ offset = readushort(f)
+ }
+ end
+ --
+ for i=1,noffeatures do
+ local feature = features[i]
+ local offset = featureoffset+feature.offset
+ setposition(f,offset)
+ local parameters = readushort(f) -- feature.parameters
+ local noflookups = readushort(f)
+ if noflookups > 0 then
+ local lookups = { }
+ feature.lookups = lookups
+ for j=1,noflookups do
+ lookups[j] = readushort(f) + 1
+ end
+ end
+ if parameters > 0 then
+ feature.parameters = parameters
+ local plugin = plugins[feature.tag]
+ if plugin then
+ plugin(f,fontdata,offset,parameters)
+ end
+ end
+ end
+ return features
+ end
+
+ local function readlookups(f,lookupoffset,lookuptypes,featurehash,featureorder)
+ setposition(f,lookupoffset)
+ local lookups = { }
+ local noflookups = readushort(f)
+ for i=1,noflookups do
+ lookups[i] = readushort(f)
+ end
+ for lookupid=1,noflookups do
+ local index = lookups[lookupid]
+ setposition(f,lookupoffset+index)
+ local subtables = { }
+ local typebits = readushort(f)
+ local flagbits = readushort(f)
+ local lookuptype = lookuptypes[typebits]
+ local lookupflags = lookupflags[flagbits]
+ local nofsubtables = readushort(f)
+ for j=1,nofsubtables do
+ local offset = readushort(f)
+ subtables[j] = offset + index -- we can probably put lookupoffset here
+ end
+ -- which one wins?
+ local markclass = bittest(flagbits,0x0010) -- usemarkfilteringset
+ if markclass then
+ markclass = readushort(f) -- + 1
+ end
+ local markset = rshift(flagbits,8)
+ if markset > 0 then
+ markclass = markset -- + 1
+ end
+ lookups[lookupid] = {
+ type = lookuptype,
+ chain = chaindirections[lookuptype] or nil,
+ flags = lookupflags,
+ name = lookupid,
+ subtables = subtables,
+ markclass = markclass,
+ features = featurehash[lookupid], -- not if extension
+ order = featureorder[lookupid],
+ }
+ end
+ return lookups
+ end
+
+ local function readscriptoffsets(f,fontdata,tableoffset)
+ if not tableoffset then
+ return
+ end
+ setposition(f,tableoffset)
+ local version = readulong(f)
+ if version ~= 0x00010000 then
+ report("table version %a of %a is not supported (yet), maybe font %s is bad",version,what,fontdata.filename)
+ return
+ end
+ --
+ return tableoffset + readushort(f), tableoffset + readushort(f), tableoffset + readushort(f)
+ end
+
+ local f_lookupname = formatters["%s_%s_%s"]
+
+ local function resolvelookups(f,lookupoffset,fontdata,lookups,lookuptypes,lookuphandlers,what)
+
+ local sequences = fontdata.sequences or { }
+ local sublookuplist = fontdata.sublookups or { }
+ fontdata.sequences = sequences
+ fontdata.sublookups = sublookuplist
+ local nofsublookups = #sublookuplist
+ local nofsequences = #sequences -- 0
+ local lastsublookup = nofsublookups
+ local lastsequence = nofsequences
+ local lookupnames = lookupnames[what]
+ local sublookuphash = { }
+ local sublookupcheck = { }
+ local glyphs = fontdata.glyphs
+ local nofglyphs = fontdata.nofglyphs or #glyphs
+ local noflookups = #lookups
+ local lookupprefix = sub(what,1,1)
+ --
+ for lookupid=1,noflookups do
+ local lookup = lookups[lookupid]
+ local lookuptype = lookup.type
+ local subtables = lookup.subtables
+ local features = lookup.features
+ local handler = lookuphandlers[lookuptype]
+ if handler then
+ local nofsubtables = #subtables
+ local order = lookup.order
+ local flags = lookup.flags
+ local markclass = lookup.markclass
+ if nofsubtables > 0 then
+ local steps = { }
+ local nofsteps = 0
+ local oldtype = nil
+ for s=1,nofsubtables do
+ local step, lt = handler(f,fontdata,lookupid,lookupoffset,subtables[s],glyphs,nofglyphs)
+ if lt then
+ lookuptype = lt
+ if oldtype and lt ~= oldtype then
+ report("messy %s lookup type %a and %a",what,lookuptype,oldtype)
+ end
+ oldtype = lookuptype
+ end
+ if not step then
+ report("unsupported %s lookup type %a",what,lookuptype)
+ else
+ nofsteps = nofsteps + 1
+ steps[nofsteps] = step
+ local rules = step.rules
+ if rules then
+ for i=1,#rules do
+ local rule = rules[i]
+ local before = rule.before
+ local current = rule.current
+ local after = rule.after
+ if before then
+ for i=1,#before do
+ before[i] = tohash(before[i])
+ end
+ -- as with original ctx ff loader
+ rule.before = reversed(before)
+ end
+ if current then
+ for i=1,#current do
+ current[i] = tohash(current[i])
+ end
+ end
+ if after then
+ for i=1,#after do
+ after[i] = tohash(after[i])
+ end
+ end
+ end
+ end
+ end
+ end
+ if nofsteps ~= nofsubtables then
+ report("bogus subtables removed in %s lookup type %a",what,lookuptype)
+ end
+ lookuptype = lookupnames[lookuptype] or lookuptype
+ if features then
+ nofsequences = nofsequences + 1
+ -- report("registering %i as sequence step %i",lookupid,nofsequences)
+ local l = {
+ index = nofsequences,
+ name = f_lookupname(lookupprefix,"s",lookupid+lookupidoffset),
+ steps = steps,
+ nofsteps = nofsteps,
+ type = lookuptype,
+ markclass = markclass or nil,
+ flags = flags,
+ order = order,
+ features = features,
+ }
+ sequences[nofsequences] = l
+ lookup.done = l
+ else
+ nofsublookups = nofsublookups + 1
+ -- report("registering %i as sublookup %i",lookupid,nofsublookups)
+ local l = {
+ index = nofsublookups,
+ name = f_lookupname(lookupprefix,"l",lookupid+lookupidoffset),
+ steps = steps,
+ nofsteps = nofsteps,
+ type = lookuptype,
+ markclass = markclass or nil,
+ flags = flags,
+ }
+ sublookuplist[nofsublookups] = l
+ sublookuphash[lookupid] = nofsublookups
+ sublookupcheck[lookupid] = 0
+ lookup.done = l
+ end
+ else
+ report("no subtables for lookup %a",lookupid)
+ end
+ else
+ report("no handler for lookup %a with type %a",lookupid,lookuptype)
+ end
+ end
+
+ -- When we have a context, we have sublookups that resolve into lookups for which we need to
+ -- know the type. We split the main lookuptable in two parts: sequences (the main lookups)
+ -- and subtable lookups (simple specs with no features). We could keep them merged and might do
+ -- that once we only use this loader. Then we can also move the simple specs into the sequence.
+ -- After all, we pack afterwards.
+
+ local reported = { }
+
+ for i=lastsequence+1,nofsequences do
+ local sequence = sequences[i]
+ local steps = sequence.steps
+ for i=1,#steps do
+ local step = steps[i]
+ local rules = step.rules
+ if rules then
+ for i=1,#rules do
+ local rule = rules[i]
+ local rlookups = rule.lookups
+ if not rlookups then
+ local name = sequence.name
+ if not reported[name] then
+ report("rule %i in %s lookup %a has %s lookups",i,what,name,"no")
+ reported[name] = true
+ end
+ elseif not next(rlookups) then
+ local name = sequence.name
+ if not reported[name] then
+ report("rule %i in %s lookup %a has %s lookups",i,what,name,"empty")
+ reported[name] = true
+ end
+ rule.lookups = nil
+ else
+ for index, lookupid in sortedhash(rlookups) do -- nicer
+ local h = sublookuphash[lookupid]
+ if not h then
+ -- here we have a lookup that is used independent as well
+ -- as in another one
+ nofsublookups = nofsublookups + 1
+ -- report("registering %i as sublookup %i",lookupid,nofsublookups)
+ local d = lookups[lookupid].done
+ h = {
+ index = nofsublookups, -- handy for tracing
+ name = f_lookupname(lookupprefix,"d",lookupid+lookupidoffset),
+ derived = true, -- handy for tracing
+ steps = d.steps,
+ nofsteps = d.nofsteps,
+ type = d.lookuptype,
+ markclass = d.markclass or nil,
+ flags = d.flags,
+ }
+ sublookuplist[nofsublookups] = h
+ sublookuphash[lookupid] = nofsublookups
+ sublookupcheck[lookupid] = 1
+ else
+ sublookupcheck[lookupid] = sublookupcheck[lookupid] + 1
+ end
+ rlookups[index] = h
+ end
+ end
+ end
+ end
+ end
+ end
+
+ for i, n in sortedhash(sublookupcheck) do
+ if n == 0 then
+ report("%s lookup %i is not used",what,i) -- lookups[i].done.lookupid
+ end
+ end
+
+ end
+
+ local function readscripts(f,fontdata,what,lookuptypes,lookuphandlers,lookupstoo)
+ local datatable = fontdata.tables[what]
+ if not datatable then
+ return
+ end
+ local tableoffset = datatable.offset
+ if not tableoffset then
+ return
+ end
+ local scriptoffset, featureoffset, lookupoffset = readscriptoffsets(f,fontdata,tableoffset)
+ if not scriptoffset then
+ return
+ end
+ --
+ local scripts = readscriplan(f,fontdata,scriptoffset)
+ local features = readfeatures(f,fontdata,featureoffset)
+ --
+ local scriptlangs, featurehash, featureorder = reorderfeatures(fontdata,scripts,features)
+ --
+ if fontdata.features then
+ fontdata.features[what] = scriptlangs
+ else
+ fontdata.features = { [what] = scriptlangs }
+ end
+ --
+ if not lookupstoo then
+ return
+ end
+ --
+ local lookups = readlookups(f,lookupoffset,lookuptypes,featurehash,featureorder)
+ --
+ if lookups then
+ resolvelookups(f,lookupoffset,fontdata,lookups,lookuptypes,lookuphandlers,what)
+ end
+ end
+
+ local function checkkerns(f,fontdata,specification)
+ local features = fontdata.features
+ local gposfeatures = features and features.gpos
+ if not gposfeatures or not gposfeatures.kern then
+ local datatable = fontdata.tables.kern
+ if datatable then
+ report("adding global kern table as gpos feature")
+ setposition(f,datatable.offset)
+ local version = readushort(f)
+ local noftables = readushort(f)
+ local kerns = setmetatableindex("table")
+ for i=1,noftables do
+ local version = readushort(f)
+ local length = readushort(f)
+ local coverage = readushort(f)
+ -- bit 8-15 of coverage: format 0 or 2
+ local format = bit32.rshift(coverage,8) -- is this ok?
+ if format == 0 then
+ local nofpairs = readushort(f)
+ local searchrange = readushort(f)
+ local entryselector = readushort(f)
+ local rangeshift = readushort(f)
+ for i=1,nofpairs do
+ kerns[readushort(f)][readushort(f)] = readfword(f)
+ end
+ elseif format == 2 then
+ -- apple specific so let's ignore it
+ else
+ -- not supported by ms
+ end
+ end
+ local feature = { dflt = { dflt = true } }
+ if not features then
+ fontdata.features = { gpos = { kern = feature } }
+ elseif not gposfeatures then
+ fontdata.features.gpos = { kern = feature }
+ else
+ gposfeatures.kern = feature
+ end
+ local sequences = fontdata.sequences
+ if not sequences then
+ sequences = { }
+ fontdata.sequences = sequences
+ end
+ local nofsequences = #sequences + 1
+ sequences[nofsequences] = {
+ index = nofsequences,
+ name = "kern",
+ steps = {
+ {
+ coverage = kerns,
+ format = "kern",
+ },
+ },
+ nofsteps = 1,
+ type = "gpos_pair",
+ flags = { false, false, false, false },
+ order = { "kern" },
+ features = { kern = feature },
+ }
+ end
+ end
+ end
+
+ function readers.gsub(f,fontdata,specification)
+ if specification.details then
+ readscripts(f,fontdata,"gsub",gsubtypes,gsubhandlers,specification.lookups)
+ end
+ end
+
+ function readers.gpos(f,fontdata,specification)
+ if specification.details then
+ readscripts(f,fontdata,"gpos",gpostypes,gposhandlers,specification.lookups)
+ if specification.lookups then
+ checkkerns(f,fontdata,specification)
+ end
+ end
+ end
+
+end
+
+function readers.gdef(f,fontdata,specification)
+ if specification.glyphs then
+ local datatable = fontdata.tables.gdef
+ if datatable then
+ local tableoffset = datatable.offset
+ setposition(f,tableoffset)
+ local version = readulong(f)
+ local classoffset = tableoffset + readushort(f)
+ local attachmentoffset = tableoffset + readushort(f) -- used for bitmaps
+ local ligaturecarets = tableoffset + readushort(f) -- used in editors (maybe nice for tracing)
+ local markclassoffset = tableoffset + readushort(f)
+ local marksetsoffset = version == 0x00010002 and (tableoffset + readushort(f))
+ local glyphs = fontdata.glyphs
+ local marks = { }
+ local markclasses = setmetatableindex("table")
+ local marksets = setmetatableindex("table")
+ fontdata.marks = marks
+ fontdata.markclasses = markclasses
+ fontdata.marksets = marksets
+ -- class definitions
+ setposition(f,classoffset)
+ local classformat = readushort(f)
+ if classformat == 1 then
+ local firstindex = readushort(f)
+ local lastindex = firstindex + readushort(f) - 1
+ for index=firstindex,lastindex do
+ local class = classes[readushort(f)]
+ if class == "mark" then
+ marks[index] = true
+ end
+ glyphs[index].class = class
+ end
+ elseif classformat == 2 then
+ local nofranges = readushort(f)
+ for i=1,nofranges do
+ local firstindex = readushort(f)
+ local lastindex = readushort(f)
+ local class = classes[readushort(f)]
+ if class then
+ for index=firstindex,lastindex do
+ glyphs[index].class = class
+ if class == "mark" then
+ marks[index] = true
+ end
+ end
+ end
+ end
+ end
+ -- mark classes
+ setposition(f,markclassoffset)
+ local classformat = readushort(f)
+ if classformat == 1 then
+ local firstindex = readushort(f)
+ local lastindex = firstindex + readushort(f) - 1
+ for index=firstindex,lastindex do
+ markclasses[readushort(f)][index] = true
+ end
+ elseif classformat == 2 then
+ local nofranges = readushort(f)
+ for i=1,nofranges do
+ local firstindex = readushort(f)
+ local lastindex = readushort(f)
+ local class = markclasses[readushort(f)]
+ for index=firstindex,lastindex do
+ class[index] = true
+ end
+ end
+ end
+ -- mark sets : todo: just make the same as class sets above
+ if marksetsoffset then
+ setposition(f,marksetsoffset)
+ local format = readushort(f)
+ if format == 1 then
+ local nofsets = readushort(f)
+ local sets = { }
+ for i=1,nofsets do
+ sets[i] = readulong(f)
+ end
+ -- somehow this fails on e.g. notosansethiopic-bold.ttf
+ for i=1,nofsets do
+ local offset = sets[i]
+ if offset ~= 0 then
+ marksets[i] = readcoverage(f,offset)
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
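+ -- A sketch (not part of the reader) of how the gdef results above can be consulted:
+ -- 'fontdata.marks' is a set of glyph indices classified as marks, and
+ -- 'fontdata.markclasses[class]' is again a set of glyph indices.
+ --
+ -- local function ismark(fontdata,index) -- illustration only
+ --     return fontdata.marks[index] == true
+ -- end
+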
+-- We keep this code here instead of font-otm.lua because we need coverage
+-- helpers. Okay, these helpers could go to the main reader file some day.
+
+local function readmathvalue(f)
+ local v = readshort(f)
+ skipshort(f,1) -- offset to device table
+ return v
+end
+
+local function readmathconstants(f,fontdata,offset)
+ setposition(f,offset)
+ fontdata.mathconstants = {
+ ScriptPercentScaleDown = readshort(f),
+ ScriptScriptPercentScaleDown = readshort(f),
+ DelimitedSubFormulaMinHeight = readushort(f),
+ DisplayOperatorMinHeight = readushort(f),
+ MathLeading = readmathvalue(f),
+ AxisHeight = readmathvalue(f),
+ AccentBaseHeight = readmathvalue(f),
+ FlattenedAccentBaseHeight = readmathvalue(f),
+ SubscriptShiftDown = readmathvalue(f),
+ SubscriptTopMax = readmathvalue(f),
+ SubscriptBaselineDropMin = readmathvalue(f),
+ SuperscriptShiftUp = readmathvalue(f),
+ SuperscriptShiftUpCramped = readmathvalue(f),
+ SuperscriptBottomMin = readmathvalue(f),
+ SuperscriptBaselineDropMax = readmathvalue(f),
+ SubSuperscriptGapMin = readmathvalue(f),
+ SuperscriptBottomMaxWithSubscript = readmathvalue(f),
+ SpaceAfterScript = readmathvalue(f),
+ UpperLimitGapMin = readmathvalue(f),
+ UpperLimitBaselineRiseMin = readmathvalue(f),
+ LowerLimitGapMin = readmathvalue(f),
+ LowerLimitBaselineDropMin = readmathvalue(f),
+ StackTopShiftUp = readmathvalue(f),
+ StackTopDisplayStyleShiftUp = readmathvalue(f),
+ StackBottomShiftDown = readmathvalue(f),
+ StackBottomDisplayStyleShiftDown = readmathvalue(f),
+ StackGapMin = readmathvalue(f),
+ StackDisplayStyleGapMin = readmathvalue(f),
+ StretchStackTopShiftUp = readmathvalue(f),
+ StretchStackBottomShiftDown = readmathvalue(f),
+ StretchStackGapAboveMin = readmathvalue(f),
+ StretchStackGapBelowMin = readmathvalue(f),
+ FractionNumeratorShiftUp = readmathvalue(f),
+ FractionNumeratorDisplayStyleShiftUp = readmathvalue(f),
+ FractionDenominatorShiftDown = readmathvalue(f),
+ FractionDenominatorDisplayStyleShiftDown = readmathvalue(f),
+ FractionNumeratorGapMin = readmathvalue(f),
+ FractionNumeratorDisplayStyleGapMin = readmathvalue(f),
+ FractionRuleThickness = readmathvalue(f),
+ FractionDenominatorGapMin = readmathvalue(f),
+ FractionDenominatorDisplayStyleGapMin = readmathvalue(f),
+ SkewedFractionHorizontalGap = readmathvalue(f),
+ SkewedFractionVerticalGap = readmathvalue(f),
+ OverbarVerticalGap = readmathvalue(f),
+ OverbarRuleThickness = readmathvalue(f),
+ OverbarExtraAscender = readmathvalue(f),
+ UnderbarVerticalGap = readmathvalue(f),
+ UnderbarRuleThickness = readmathvalue(f),
+ UnderbarExtraDescender = readmathvalue(f),
+ RadicalVerticalGap = readmathvalue(f),
+ RadicalDisplayStyleVerticalGap = readmathvalue(f),
+ RadicalRuleThickness = readmathvalue(f),
+ RadicalExtraAscender = readmathvalue(f),
+ RadicalKernBeforeDegree = readmathvalue(f),
+ RadicalKernAfterDegree = readmathvalue(f),
+ RadicalDegreeBottomRaisePercent = readshort(f),
+ }
+end
+
+local function readmathglyphinfo(f,fontdata,offset)
+ setposition(f,offset)
+ local italics = readushort(f)
+ local accents = readushort(f)
+ local extensions = readushort(f)
+ local kerns = readushort(f)
+ local glyphs = fontdata.glyphs
+ if italics ~= 0 then
+ setposition(f,offset+italics)
+ local coverage = readushort(f)
+ local nofglyphs = readushort(f)
+ coverage = readcoverage(f,offset+italics+coverage,true)
+ for i=1,nofglyphs do
+ local italic = readshort(f)
+ if italic ~= 0 then
+ local glyph = glyphs[coverage[i]]
+ local math = glyph.math
+ if not math then
+ glyph.math = { italic = italic }
+ else
+ math.italic = italic
+ end
+ end
+ end
+ fontdata.hasitalics = true
+ end
+ if accents ~= 0 then
+ setposition(f,offset+accents)
+ local coverage = readushort(f)
+ local nofglyphs = readushort(f)
+ coverage = readcoverage(f,offset+accents+coverage,true)
+ for i=1,nofglyphs do
+ local accent = readshort(f)
+ if accent ~= 0 then
+ local glyph = glyphs[coverage[i]]
+ local math = glyph.math
+ if not math then
+ glyph.math = { accent = accent }
+ else
+ math.accent = accent
+ end
+ end
+ end
+ end
+ if extensions ~= 0 then
+ setposition(f,offset+extensions)
+ end
+ if kerns ~= 0 then
+ local kernoffset = offset + kerns
+ setposition(f,kernoffset)
+ local coverage = readushort(f)
+ local nofglyphs = readushort(f)
+ if nofglyphs > 0 then
+ local function get(offset)
+ setposition(f,kernoffset+offset)
+ local n = readushort(f)
+ if n > 0 then
+ local l = { }
+ -- for i=1,n do
+ -- l[i] = { readushort(f), 0 } -- height, kern
+ -- skipshort(f)
+ -- end
+ -- for i=1,n do
+ -- l[i][2] = readushort(f)
+ -- skipshort(f)
+ -- end
+ for i=1,n do
+ l[i] = { height = readmathvalue(f) }
+ end
+ for i=1,n do
+ l[i].kern = readmathvalue(f)
+ end
+ l[n+1] = { kern = readmathvalue(f) }
+ return l
+ end
+ end
+ local kernsets = { }
+ for i=1,nofglyphs do
+ local topright = readushort(f)
+ local topleft = readushort(f)
+ local bottomright = readushort(f)
+ local bottomleft = readushort(f)
+ kernsets[i] = {
+ topright = topright ~= 0 and topright or nil,
+ topleft = topleft ~= 0 and topleft or nil,
+ bottomright = bottomright ~= 0 and bottomright or nil,
+ bottomleft = bottomleft ~= 0 and bottomleft or nil,
+ }
+ end
+ coverage = readcoverage(f,kernoffset+coverage,true)
+ for i=1,nofglyphs do
+ local kernset = kernsets[i]
+ if next(kernset) then
+ local k = kernset.topright if k then kernset.topright = get(k) end
+ local k = kernset.topleft if k then kernset.topleft = get(k) end
+ local k = kernset.bottomright if k then kernset.bottomright = get(k) end
+ local k = kernset.bottomleft if k then kernset.bottomleft = get(k) end
+ if next(kernset) then
+ local glyph = glyphs[coverage[i]]
+ local math = glyph.math
+ if not math then
+ glyph.math = { kerns = kernset }
+ else
+ math.kerns = kernset
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
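+ -- Not part of the reader, just a sketch of what readmathglyphinfo stores per glyph:
+ -- each corner of glyph.math.kerns is a list of { height = ..., kern = ... } records
+ -- plus one trailing { kern = ... } entry for heights beyond the last cut-in.
+ --
+ -- glyph.math = { -- hypothetical values
+ --     italic = 50,
+ --     kerns  = {
+ --         topright = { { height = 300, kern = -20 }, { kern = -40 } },
+ --     },
+ -- }
+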
+local function readmathvariants(f,fontdata,offset)
+ setposition(f,offset)
+ local glyphs = fontdata.glyphs
+ local minoverlap = readushort(f)
+ local vcoverage = readushort(f)
+ local hcoverage = readushort(f)
+ local vnofglyphs = readushort(f)
+ local hnofglyphs = readushort(f)
+ local vconstruction = { }
+ local hconstruction = { }
+ for i=1,vnofglyphs do
+ vconstruction[i] = readushort(f)
+ end
+ for i=1,hnofglyphs do
+ hconstruction[i] = readushort(f)
+ end
+
+ fontdata.mathconstants.MinConnectorOverlap = minoverlap
+
+ -- variants[i] = {
+ -- glyph = readushort(f),
+ -- advance = readushort(f),
+ -- }
+
+ local function get(offset,coverage,nofglyphs,construction,kvariants,kparts)
+ if coverage ~= 0 and nofglyphs > 0 then
+ local coverage = readcoverage(f,offset+coverage,true)
+ for i=1,nofglyphs do
+ local c = construction[i]
+ if c ~= 0 then
+ local index = coverage[i]
+ local glyph = glyphs[index]
+ local math = glyph.math
+ setposition(f,offset+c)
+ local assembly = readushort(f)
+ local nofvariants = readushort(f)
+ if nofvariants > 0 then
+ local variants, v = nil, 0
+ for i=1,nofvariants do
+ local variant = readushort(f)
+ if variant == index then
+ -- ignore
+ elseif variants then
+ v = v + 1
+ variants[v] = variant
+ else
+ v = 1
+ variants = { variant }
+ end
+ skipshort(f)
+ end
+ if not variants then
+ -- only self
+ elseif not math then
+ math = { [kvariants] = variants }
+ glyph.math = math
+ else
+ math[kvariants] = variants
+ end
+ end
+ if assembly ~= 0 then
+ setposition(f,offset + c + assembly)
+ local italics = readmathvalue(f)
+ local nofparts = readushort(f)
+ local parts = { }
+ for i=1,nofparts do
+ local p = {
+ glyph = readushort(f),
+ start = readushort(f),
+ ["end"] = readushort(f),
+ advance = readushort(f),
+ }
+ local flags = readushort(f)
+ if bittest(flags,0x0001) then
+ p.extender = 1 -- true
+ end
+ parts[i] = p
+ end
+ if not math then
+ math = { [kparts] = parts }
+ glyph.math = math
+ else
+ math[kparts] = parts
+ end
+ end
+ end
+ end
+ end
+ end
+
+ get(offset,vcoverage,vnofglyphs,vconstruction,"vvariants","vparts")
+ get(offset,hcoverage,hnofglyphs,hconstruction,"hvariants","hparts")
+end
+
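+ -- Again for illustration only: readmathvariants ends up with per-glyph lists like
+ -- the following (hypothetical indices and sizes), where the variants exclude the
+ -- glyph itself and parts carry an extender flag when set:
+ --
+ -- glyph.math = {
+ --     vvariants = { 3056, 3057, 3058 },
+ --     vparts    = {
+ --         { glyph = 3059, start = 100, ["end"] = 150, advance = 700 },
+ --         { glyph = 3060, start = 100, ["end"] = 100, advance = 500, extender = 1 },
+ --     },
+ -- }
+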
+function readers.math(f,fontdata,specification)
+ if specification.glyphs then
+ local datatable = fontdata.tables.math
+ if datatable then
+ local tableoffset = datatable.offset
+ setposition(f,tableoffset)
+ local version = readulong(f)
+ if version ~= 0x00010000 then
+ report("table version %a of %a is not supported (yet), maybe font %s is bad",version,what,fontdata.filename)
+ return
+ end
+ local constants = readushort(f)
+ local glyphinfo = readushort(f)
+ local variants = readushort(f)
+ if constants == 0 then
+ report("the math table of %a has no constants",fontdata.filename)
+ else
+ readmathconstants(f,fontdata,tableoffset+constants)
+ end
+ if glyphinfo ~= 0 then
+ readmathglyphinfo(f,fontdata,tableoffset+glyphinfo)
+ end
+ if variants ~= 0 then
+ readmathvariants(f,fontdata,tableoffset+variants)
+ end
+ end
+ end
+end
diff --git a/tex/context/base/font-gds.lua b/tex/context/base/font-gds.lua
index 073d9ed57..7dfbee5a7 100644
--- a/tex/context/base/font-gds.lua
+++ b/tex/context/base/font-gds.lua
@@ -611,16 +611,20 @@ local function initialize(tfmdata)
local hfactor = parameters.hfactor
for k, v in next, corrections do
local c = characters[k]
- if v > -1 and v < 1 then
- v = v * quad
- else
- v = v * hfactor
- end
- c.italic_correction = v -- for context
- if mathitalics then
- c.italic = v -- for tex
+ if c then
+ if v > -1 and v < 1 then
+ v = v * quad
+ else
+ v = v * hfactor
+ end
+ c.italic_correction = v -- for context
+ if mathitalics then
+ c.italic = v -- for tex
+ else
+ c.italic = nil
+ end
else
- c.italic = nil
+ report_goodies("invalid mathitalics entry %U for font %a",k,properties.name)
end
end
end)
diff --git a/tex/context/base/font-lib.mkvi b/tex/context/base/font-lib.mkvi
index 9478db71b..b1050f7f5 100644
--- a/tex/context/base/font-lib.mkvi
+++ b/tex/context/base/font-lib.mkvi
@@ -61,7 +61,6 @@
\registerctxluafile{font-otp}{1.001} % otf pack
\registerctxluafile{font-otc}{1.001} % otf context
\registerctxluafile{font-oth}{1.001} % otf helpers
-\registerctxluafile{font-otl}{1.001} % otf new node mode
\registerctxluafile{font-odv}{1.001} % otf devanagari (experimental)
diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua
index d573750a4..931c755fd 100644
--- a/tex/context/base/font-mis.lua
+++ b/tex/context/base/font-mis.lua
@@ -22,7 +22,7 @@ local handlers = fonts.handlers
handlers.otf = handlers.otf or { }
local otf = handlers.otf
-otf.version = otf.version or 2.816
+otf.version = otf.version or 2.817
otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true)
local fontloader = fontloader
diff --git a/tex/context/base/font-osd.lua b/tex/context/base/font-osd.lua
new file mode 100644
index 000000000..368588206
--- /dev/null
+++ b/tex/context/base/font-osd.lua
@@ -0,0 +1,2397 @@
+if not modules then modules = { } end modules ['font-osd'] = { -- script devanagari
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Kai Eigner, TAT Zetwerk / Hans Hagen, PRAGMA ADE",
+ copyright = "TAT Zetwerk / PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is a version of font-odv.lua adapted to the new font loader and more
+-- direct hashing. The initialization code has been adapted (more efficient). One day
+-- I'll speed this up ... char swapping and properties.
+
+-- A few remarks:
+--
+ -- This code is a partial rewrite of the code that deals with devanagari. The data and logic
+ -- are by Kai Eigner, based on Microsoft's OpenType specifications for specific scripts, but
+ -- with a few improvements. More information can be found at:
+--
+-- deva: http://www.microsoft.com/typography/OpenType%20Dev/devanagari/introO.mspx
+-- dev2: http://www.microsoft.com/typography/OpenType%20Dev/devanagari/intro.mspx
+--
+-- Rajeesh Nambiar provided patches for the malayalam variant. Thanks to feedback from
+-- the mailing list some aspects could be improved.
+--
+ -- As I touched nearly all code, reshuffled it, optimized a lot, etc. etc. (imagine how
+ -- much can get messed up in over a week's work) it could be that I introduced bugs. There
+-- is more to gain (esp in the functions applied to a range) but I'll do that when
+-- everything works as expected. Kai's original code is kept in font-odk.lua as a reference
+-- so blame me (HH) for bugs.
+--
+ -- It is interesting that Kai managed to write this on top of the existing otf handler. Only a
+-- few extensions were needed, like a few more analyzing states and dealing with changed
+-- head nodes in the core scanner as that only happens here. There's a lot going on here
+-- and it's only because I touched nearly all code that I got a bit of a picture of what
+-- happens. For in-depth knowledge one needs to consult Kai.
+--
+-- The rewrite mostly deals with efficiency, both in terms of speed and code. We also made
+ -- sure that it suits generic use as well as use in ConTeXt. I removed some buglets but may
+ -- just as well have messed up the logic by doing this. For that reason we keep the original
+ -- around, as it serves as a reference. Due to all the reshuffling of glyphs quite some leaks
+ -- occur(red), but once I'm satisfied with the rewrite I'll weed them out. I also integrated
+-- initialization etc into the regular mechanisms.
+--
+-- In the meantime, we're down from 25.5-3.5=22 seconds to 17.7-3.5=14.2 seconds for a 100
+-- page sample (mid 2012) with both variants so it's worth the effort. Some more speedup is
+ -- to be expected. Due to the method chosen it will never be really fast. If I ever become a
+-- power user I'll have a go at some further speed up. I will rename some functions (and
+-- features) once we don't need to check the original code. We now use a special subset
+ -- sequence for use inside the analyzer (after all we can store this in the dataset
+-- and save redundant analysis).
+--
+-- I might go for an array approach with respect to attributes (and reshuffling). Easier.
+--
+-- Some data will move to char-def.lua (some day).
+--
+-- Hans Hagen, PRAGMA-ADE, Hasselt NL
+--
+ -- We could have c_nukta, c_halant, c_ra if we know that they are never used mixed within
+-- one script .. yes or no?
+--
+-- Matras: according to Microsoft typography specifications "up to one of each type:
+-- pre-, above-, below- or post- base", but that does not seem to be right. It could
+-- become an option.
+--
+-- The next code looks weird anyway: the "and boolean" should move inside the if
+-- or we should check differently (case vs successive).
+--
+-- local function ms_matra(c)
+-- local prebase, abovebase, belowbase, postbase = true, true, true, true
+-- local n = getnext(c)
+-- while n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font do
+-- local char = getchar(n)
+-- if not dependent_vowel[char] then
+-- break
+-- elseif pre_mark[char] and prebase then
+-- prebase = false
+-- elseif above_mark[char] and abovebase then
+-- abovebase = false
+-- elseif below_mark[char] and belowbase then
+-- belowbase = false
+-- elseif post_mark[char] and postbase then
+-- postbase = false
+-- else
+-- return c
+-- end
+-- c = getnext(c)
+-- end
+-- return c
+-- end
+
+-- todo: first test for font then for subtype
+
+local insert, imerge, copy = table.insert, table.imerge, table.copy
+local next, type = next, type
+
+local report_devanagari = logs.reporter("otf","devanagari")
+
+fonts = fonts or { }
+fonts.analyzers = fonts.analyzers or { }
+fonts.analyzers.methods = fonts.analyzers.methods or { node = { otf = { } } }
+
+local otf = fonts.handlers.otf
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+
+local handlers = otf.handlers
+local methods = fonts.analyzers.methods
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+
+local insert_node_after = nuts.insert_after
+local copy_node = nuts.copy
+local free_node = nuts.free
+local remove_node = nuts.remove
+local flush_list = nuts.flush_list
+
+local copyinjection = nodes.injections.copy -- KE: is this necessary? HH: probably not as positioning comes later and we rawget/set
+
+local unsetvalue = attributes.unsetvalue
+
+local fontdata = fonts.hashes.identifiers
+
+local a_state = attributes.private('state')
+local a_syllabe = attributes.private('syllabe')
+
+local dotted_circle = 0x25CC
+
+local states = fonts.analyzers.states -- not features
+
+local s_rphf = states.rphf
+local s_half = states.half
+local s_pref = states.pref
+local s_blwf = states.blwf
+local s_pstf = states.pstf
+
+local replace_all_nbsp = nil
+
+replace_all_nbsp = function(head) -- delayed definition
+ replace_all_nbsp = typesetters and typesetters.characters and typesetters.characters.replacenbspaces or function(head)
+ return head
+ end
+ return replace_all_nbsp(head)
+end
+
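+ -- The above is the usual lazy resolving trick: the function replaces itself the first
+ -- time it is called, so the typesetters namespace only needs to exist by then. A
+ -- generic sketch of the idiom (names here are made up):
+ --
+ -- local resolve ; resolve = function(...)
+ --     resolve = somenamespace and somenamespace.helper or function() end
+ --     return resolve(...)
+ -- end
+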
+local xprocesscharacters = nil
+
+if context then
+ xprocesscharacters = function(head,font)
+ xprocesscharacters = nodes.handlers.characters
+ return xprocesscharacters(head,font)
+ end
+else
+ xprocesscharacters = function(head,font)
+ xprocesscharacters = nodes.handlers.nodepass -- generic
+ return xprocesscharacters(head,font)
+ end
+end
+
+local function processcharacters(head,font)
+ return tonut(xprocesscharacters(tonode(head))) -- can be more efficient in context, just direct call
+end
+
+-- local fontprocesses = fonts.hashes.processes
+--
+-- function processcharacters(head,font)
+-- local processors = fontprocesses[font]
+-- for i=1,#processors do
+-- head = processors[i](head,font,0)
+-- end
+-- return head, true
+-- end
+
+-- In due time there will be entries here for scripts like Bengali, Gujarati,
+-- Gurmukhi, Kannada, Malayalam, Oriya, Tamil, Telugu. Feel free to provide the
+-- code points.
+
+ -- We can assume that scripts are not mixed in the source, but if they are, we
+ -- might need to have consonants etc per script and initialize a local table
+-- pointing to the right one.
+
+-- new, to be checked:
+--
+-- U+00978 : DEVANAGARI LETTER MARWARI DDA
+-- U+00980 : BENGALI ANJI
+-- U+00C00 : TELUGU SIGN COMBINING CANDRABINDU ABOVE
+-- U+00C34 : TELUGU LETTER LLLA
+-- U+00C81 : KANNADA SIGN CANDRABINDU
+-- U+00D01 : MALAYALAM SIGN CANDRABINDU
+-- U+00DE6 : SINHALA LITH DIGIT ZERO
+-- U+00DE7 : SINHALA LITH DIGIT ONE
+-- U+00DE8 : SINHALA LITH DIGIT TWO
+-- U+00DE9 : SINHALA LITH DIGIT THREE
+-- U+00DEA : SINHALA LITH DIGIT FOUR
+-- U+00DEB : SINHALA LITH DIGIT FIVE
+-- U+00DEC : SINHALA LITH DIGIT SIX
+-- U+00DED : SINHALA LITH DIGIT SEVEN
+-- U+00DEE : SINHALA LITH DIGIT EIGHT
+-- U+00DEF : SINHALA LITH DIGIT NINE
+
+local consonant = {
+ -- devanagari
+ [0x0915] = true, [0x0916] = true, [0x0917] = true, [0x0918] = true,
+ [0x0919] = true, [0x091A] = true, [0x091B] = true, [0x091C] = true,
+ [0x091D] = true, [0x091E] = true, [0x091F] = true, [0x0920] = true,
+ [0x0921] = true, [0x0922] = true, [0x0923] = true, [0x0924] = true,
+ [0x0925] = true, [0x0926] = true, [0x0927] = true, [0x0928] = true,
+ [0x0929] = true, [0x092A] = true, [0x092B] = true, [0x092C] = true,
+ [0x092D] = true, [0x092E] = true, [0x092F] = true, [0x0930] = true,
+ [0x0931] = true, [0x0932] = true, [0x0933] = true, [0x0934] = true,
+ [0x0935] = true, [0x0936] = true, [0x0937] = true, [0x0938] = true,
+ [0x0939] = true, [0x0958] = true, [0x0959] = true, [0x095A] = true,
+ [0x095B] = true, [0x095C] = true, [0x095D] = true, [0x095E] = true,
+ [0x095F] = true, [0x0979] = true, [0x097A] = true,
+ -- kannada
+ [0x0C95] = true, [0x0C96] = true, [0x0C97] = true, [0x0C98] = true,
+ [0x0C99] = true, [0x0C9A] = true, [0x0C9B] = true, [0x0C9C] = true,
+ [0x0C9D] = true, [0x0C9E] = true, [0x0C9F] = true, [0x0CA0] = true,
+ [0x0CA1] = true, [0x0CA2] = true, [0x0CA3] = true, [0x0CA4] = true,
+ [0x0CA5] = true, [0x0CA6] = true, [0x0CA7] = true, [0x0CA8] = true,
+ [0x0CA9] = true, [0x0CAA] = true, [0x0CAB] = true, [0x0CAC] = true,
+ [0x0CAD] = true, [0x0CAE] = true, [0x0CAF] = true, [0x0CB0] = true,
+ [0x0CB1] = true, [0x0CB2] = true, [0x0CB3] = true, [0x0CB4] = true,
+ [0x0CB5] = true, [0x0CB6] = true, [0x0CB7] = true, [0x0CB8] = true,
+ [0x0CB9] = true,
+ [0x0CDE] = true, -- obsolete
+ -- malayalam
+ [0x0D15] = true, [0x0D16] = true, [0x0D17] = true, [0x0D18] = true,
+ [0x0D19] = true, [0x0D1A] = true, [0x0D1B] = true, [0x0D1C] = true,
+ [0x0D1D] = true, [0x0D1E] = true, [0x0D1F] = true, [0x0D20] = true,
+ [0x0D21] = true, [0x0D22] = true, [0x0D23] = true, [0x0D24] = true,
+ [0x0D25] = true, [0x0D26] = true, [0x0D27] = true, [0x0D28] = true,
+ [0x0D29] = true, [0x0D2A] = true, [0x0D2B] = true, [0x0D2C] = true,
+ [0x0D2D] = true, [0x0D2E] = true, [0x0D2F] = true, [0x0D30] = true,
+ [0x0D31] = true, [0x0D32] = true, [0x0D33] = true, [0x0D34] = true,
+ [0x0D35] = true, [0x0D36] = true, [0x0D37] = true, [0x0D38] = true,
+ [0x0D39] = true, [0x0D3A] = true,
+}
+
+local independent_vowel = {
+ -- devanagari
+ [0x0904] = true, [0x0905] = true, [0x0906] = true, [0x0907] = true,
+ [0x0908] = true, [0x0909] = true, [0x090A] = true, [0x090B] = true,
+ [0x090C] = true, [0x090D] = true, [0x090E] = true, [0x090F] = true,
+ [0x0910] = true, [0x0911] = true, [0x0912] = true, [0x0913] = true,
+ [0x0914] = true, [0x0960] = true, [0x0961] = true, [0x0972] = true,
+ [0x0973] = true, [0x0974] = true, [0x0975] = true, [0x0976] = true,
+ [0x0977] = true,
+ -- kannada
+ [0x0C85] = true, [0x0C86] = true, [0x0C87] = true, [0x0C88] = true,
+ [0x0C89] = true, [0x0C8A] = true, [0x0C8B] = true, [0x0C8C] = true,
+ [0x0C8D] = true, [0x0C8E] = true, [0x0C8F] = true, [0x0C90] = true,
+ [0x0C91] = true, [0x0C92] = true, [0x0C93] = true, [0x0C94] = true,
+ -- malayalam
+ [0x0D05] = true, [0x0D06] = true, [0x0D07] = true, [0x0D08] = true,
+ [0x0D09] = true, [0x0D0A] = true, [0x0D0B] = true, [0x0D0C] = true,
+ [0x0D0E] = true, [0x0D0F] = true, [0x0D10] = true, [0x0D12] = true,
+ [0x0D13] = true, [0x0D14] = true,
+}
+
+local dependent_vowel = { -- matra
+ -- devanagari
+ [0x093A] = true, [0x093B] = true, [0x093E] = true, [0x093F] = true,
+ [0x0940] = true, [0x0941] = true, [0x0942] = true, [0x0943] = true,
+ [0x0944] = true, [0x0945] = true, [0x0946] = true, [0x0947] = true,
+ [0x0948] = true, [0x0949] = true, [0x094A] = true, [0x094B] = true,
+ [0x094C] = true, [0x094E] = true, [0x094F] = true, [0x0955] = true,
+ [0x0956] = true, [0x0957] = true, [0x0962] = true, [0x0963] = true,
+ -- kannada
+ [0x0CBE] = true, [0x0CBF] = true, [0x0CC0] = true, [0x0CC1] = true,
+ [0x0CC2] = true, [0x0CC3] = true, [0x0CC4] = true, [0x0CC5] = true,
+ [0x0CC6] = true, [0x0CC7] = true, [0x0CC8] = true, [0x0CC9] = true,
+ [0x0CCA] = true, [0x0CCB] = true, [0x0CCC] = true,
+ -- malayalam
+ [0x0D3E] = true, [0x0D3F] = true, [0x0D40] = true, [0x0D41] = true,
+ [0x0D42] = true, [0x0D43] = true, [0x0D44] = true, [0x0D46] = true,
+ [0x0D47] = true, [0x0D48] = true, [0x0D4A] = true, [0x0D4B] = true,
+ [0x0D4C] = true, [0x0D57] = true,
+}
+
+local vowel_modifier = {
+ -- devanagari
+ [0x0900] = true, [0x0901] = true, [0x0902] = true, [0x0903] = true,
+ -- A8E0 - A8F1 are cantillation marks for the Samaveda and may not belong here.
+ [0xA8E0] = true, [0xA8E1] = true, [0xA8E2] = true, [0xA8E3] = true,
+ [0xA8E4] = true, [0xA8E5] = true, [0xA8E6] = true, [0xA8E7] = true,
+ [0xA8E8] = true, [0xA8E9] = true, [0xA8EA] = true, [0xA8EB] = true,
+ [0xA8EC] = true, [0xA8ED] = true, [0xA8EE] = true, [0xA8EF] = true,
+ [0xA8F0] = true, [0xA8F1] = true,
+ -- malayalam
+ [0x0D02] = true, [0x0D03] = true,
+}
+
+local stress_tone_mark = {
+ [0x0951] = true, [0x0952] = true, [0x0953] = true, [0x0954] = true,
+ -- kannada
+ [0x0CCD] = true,
+ -- malayalam
+ [0x0D4D] = true,
+}
+
+local nukta = {
+ -- devanagari
+ [0x093C] = true,
+ -- kannada:
+ [0x0CBC] = true,
+}
+
+local halant = {
+ -- devanagari
+ [0x094D] = true,
+ -- kannada
+ [0x0CCD] = true,
+ -- malayalam
+ [0x0D4D] = true,
+}
+
+local ra = {
+ -- devanagari
+ [0x0930] = true,
+ -- kannada
+ [0x0CB0] = true,
+ -- malayalam
+ [0x0D30] = true,
+}
+
+local c_anudatta = 0x0952 -- used to be tables
+local c_nbsp = 0x00A0 -- used to be tables
+local c_zwnj = 0x200C -- used to be tables
+local c_zwj = 0x200D -- used to be tables
+
+local zw_char = { -- could also be inlined
+ [0x200C] = true,
+ [0x200D] = true,
+}
+
+-- 0C82 anusvara
+-- 0C83 visarga
+-- 0CBD avagraha
+-- 0CD5 length mark
+-- 0CD6 ai length mark
+-- 0CE0 letter ll
+-- 0CE1 letter rr
+-- 0CE2 vowel sign l
+ -- 0CE3 vowel sign ll
+-- 0CF1 sign
+-- 0CF2 sign
+ -- 0CE6 - 0CEF digits
+
+local pre_mark = {
+ [0x093F] = true, [0x094E] = true,
+ -- malayalam
+ [0x0D46] = true, [0x0D47] = true, [0x0D48] = true,
+}
+
+local above_mark = {
+ [0x0900] = true, [0x0901] = true, [0x0902] = true, [0x093A] = true,
+ [0x0945] = true, [0x0946] = true, [0x0947] = true, [0x0948] = true,
+ [0x0951] = true, [0x0953] = true, [0x0954] = true, [0x0955] = true,
+ [0xA8E0] = true, [0xA8E1] = true, [0xA8E2] = true, [0xA8E3] = true,
+ [0xA8E4] = true, [0xA8E5] = true, [0xA8E6] = true, [0xA8E7] = true,
+ [0xA8E8] = true, [0xA8E9] = true, [0xA8EA] = true, [0xA8EB] = true,
+ [0xA8EC] = true, [0xA8ED] = true, [0xA8EE] = true, [0xA8EF] = true,
+ [0xA8F0] = true, [0xA8F1] = true,
+ -- malayalam
+ [0x0D4E] = true,
+}
+
+local below_mark = {
+ [0x093C] = true, [0x0941] = true, [0x0942] = true, [0x0943] = true,
+ [0x0944] = true, [0x094D] = true, [0x0952] = true, [0x0956] = true,
+ [0x0957] = true, [0x0962] = true, [0x0963] = true,
+}
+
+local post_mark = {
+ [0x0903] = true, [0x093B] = true, [0x093E] = true, [0x0940] = true,
+ [0x0949] = true, [0x094A] = true, [0x094B] = true, [0x094C] = true,
+ [0x094F] = true,
+}
+
+local twopart_mark = {
+ -- malayalam
+ [0x0D4A] = { 0x0D46, 0x0D3E, }, -- ൊ
+ [0x0D4B] = { 0x0D47, 0x0D3E, }, -- ോ
+ [0x0D4C] = { 0x0D46, 0x0D57, }, -- ൌ
+}
+
+local mark_four = { } -- As we access these frequently an extra hash is used.
+
+for k, v in next, pre_mark do mark_four[k] = pre_mark end
+for k, v in next, above_mark do mark_four[k] = above_mark end
+for k, v in next, below_mark do mark_four[k] = below_mark end
+for k, v in next, post_mark do mark_four[k] = post_mark end
+
+local mark_above_below_post = { }
+
+for k, v in next, above_mark do mark_above_below_post[k] = above_mark end
+for k, v in next, below_mark do mark_above_below_post[k] = below_mark end
+for k, v in next, post_mark do mark_above_below_post[k] = post_mark end
+
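+ -- The values of these hashes are the originating subtables, so one lookup both answers
+ -- "is this a mark" and tells which positional class it belongs to, e.g. (sketch only):
+ --
+ -- local m = mark_four[char]
+ -- if m == pre_mark then
+ --     -- it goes before the base
+ -- end
+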
+-- Again, this table can be extended for other scripts than devanagari. Actually,
+-- for ConTeXt this kind of data is kept elsewhere so eventually we might move
+-- tables to someplace else.
+
+local reorder_class = {
+ -- devanagari
+ [0x0930] = "before postscript",
+ [0x093F] = "before half",
+ [0x0940] = "after subscript",
+ [0x0941] = "after subscript",
+ [0x0942] = "after subscript",
+ [0x0943] = "after subscript",
+ [0x0944] = "after subscript",
+ [0x0945] = "after subscript",
+ [0x0946] = "after subscript",
+ [0x0947] = "after subscript",
+ [0x0948] = "after subscript",
+ [0x0949] = "after subscript",
+ [0x094A] = "after subscript",
+ [0x094B] = "after subscript",
+ [0x094C] = "after subscript",
+ [0x0962] = "after subscript",
+ [0x0963] = "after subscript",
+ [0x093E] = "after subscript",
+ -- kannada:
+ [0x0CB0] = "after postscript", -- todo in code below
+ [0x0CBF] = "before subscript", -- todo in code below
+ [0x0CC6] = "before subscript", -- todo in code below
+ [0x0CCC] = "before subscript", -- todo in code below
+ [0x0CBE] = "before subscript", -- todo in code below
+ [0x0CE2] = "before subscript", -- todo in code below
+ [0x0CE3] = "before subscript", -- todo in code below
+ [0x0CC1] = "before subscript", -- todo in code below
+ [0x0CC2] = "before subscript", -- todo in code below
+ [0x0CC3] = "after subscript",
+ [0x0CC4] = "after subscript",
+ [0x0CD5] = "after subscript",
+ [0x0CD6] = "after subscript",
+ -- malayalam
+}
+
+-- We use some pseudo features as we need to manipulate the nodelist based
+-- on information in the font as well as already applied features.
+
+local dflt_true = {
+ dflt = true
+}
+
+local dev2_defaults = {
+ dev2 = dflt_true,
+}
+
+local deva_defaults = {
+ dev2 = dflt_true,
+ deva = dflt_true,
+}
+
+local false_flags = { false, false, false, false }
+
+local both_joiners_true = {
+ [0x200C] = true,
+ [0x200D] = true,
+}
+
+local sequence_reorder_matras = {
+ chain = 0,
+ features = { dv01 = dev2_defaults },
+ flags = false_flags,
+ name = "dv01_reorder_matras",
+ order = { "dv01" },
+ type = "devanagari_reorder_matras",
+ nofsteps = 1,
+ steps = {
+ { coverage = pre_mark }
+ }
+}
+
+local sequence_reorder_reph = {
+ chain = 0,
+ features = { dv02 = dev2_defaults },
+ flags = false_flags,
+ name = "dv02_reorder_reph",
+ order = { "dv02" },
+ type = "devanagari_reorder_reph",
+ nofsteps = 1,
+ steps = {
+ { coverage = { } }
+ }
+}
+
+local sequence_reorder_pre_base_reordering_consonants = {
+ chain = 0,
+ features = { dv03 = dev2_defaults },
+ flags = false_flags,
+ name = "dv03_reorder_pre_base_reordering_consonants",
+ order = { "dv03" },
+ type = "devanagari_reorder_pre_base_reordering_consonants",
+ nofsteps = 1,
+ steps = {
+ { coverage = { } }
+ }
+}
+
+local sequence_remove_joiners = {
+ chain = 0,
+ features = { dv04 = deva_defaults },
+ flags = false_flags,
+ name = "dv04_remove_joiners",
+ order = { "dv04" },
+ type = "devanagari_remove_joiners",
+ nofsteps = 1,
+ steps = {
+ { coverage = both_joiners_true },
+ }
+}
+
+ -- Looping over features is twice as efficient as looping over basic forms (some
+ -- 350 checks instead of 750 for one font). This is something to keep an eye on
+ -- as it might depend on the font. Not that it's a bottleneck.
+
+local basic_shaping_forms = {
+ nukt = true,
+ akhn = true,
+ rphf = true,
+ pref = true,
+ rkrf = true,
+ blwf = true,
+ half = true,
+ pstf = true,
+ vatu = true,
+ cjct = true,
+}
+
+local valid = {
+ akhn = true, -- malayalam
+ rphf = true,
+ pref = true,
+ half = true,
+ blwf = true,
+ pstf = true,
+ pres = true, -- malayalam
+ blws = true, -- malayalam
+ psts = true, -- malayalam
+}
+
+local function initializedevanagi(tfmdata)
+ local script, language = otf.scriptandlanguage(tfmdata,attr) -- todo: take fast variant
+ if script == "deva" or script == "dev2" or script =="mlym" or script == "mlm2" then
+ local resources = tfmdata.resources
+ local devanagari = resources.devanagari
+ if not devanagari then
+ --
+ report_devanagari("adding devanagari features to font")
+ --
+ local gsubfeatures = resources.features.gsub
+ local sequences = resources.sequences
+ local sharedfeatures = tfmdata.shared.features
+ --
+ local lastmatch = 0
+ for s=1,#sequences do -- classify chars
+ local features = sequences[s].features
+ if features then
+ for k, v in next, features do
+ if basic_shaping_forms[k] then
+ lastmatch = s
+ end
+ end
+ end
+ end
+ local insertindex = lastmatch + 1
+ --
+ gsubfeatures["dv01"] = dev2_defaults -- reorder matras
+ gsubfeatures["dv02"] = dev2_defaults -- reorder reph
+ gsubfeatures["dv03"] = dev2_defaults -- reorder pre base reordering consonants
+ gsubfeatures["dv04"] = deva_defaults -- remove joiners
+ --
+ local reorder_pre_base_reordering_consonants = copy(sequence_reorder_pre_base_reordering_consonants)
+ local reorder_reph = copy(sequence_reorder_reph)
+ local reorder_matras = copy(sequence_reorder_matras)
+ local remove_joiners = copy(sequence_remove_joiners)
+ --
+ insert(sequences,insertindex,reorder_pre_base_reordering_consonants)
+ insert(sequences,insertindex,reorder_reph)
+ insert(sequences,insertindex,reorder_matras)
+ insert(sequences,insertindex,remove_joiners)
+ --
+ local blwfcache = { }
+ local seqsubset = { }
+ local rephstep = {
+ coverage = { } -- will be adapted for each word
+ }
+ local devanagari = {
+ reph = false,
+ vattu = false,
+ blwfcache = blwfcache,
+ seqsubset = seqsubset,
+ reorderreph = rephstep,
+
+ }
+ --
+ reorder_reph.steps = { rephstep }
+ --
+ local pre_base_reordering_consonants = { }
+ reorder_pre_base_reordering_consonants.steps[1].coverage = pre_base_reordering_consonants
+ --
+ resources.devanagari = devanagari
+ --
+ for s=1,#sequences do
+ local sequence = sequences[s]
+ local steps = sequence.steps
+ local nofsteps = sequence.nofsteps
+ local features = sequence.features
+ if features["rphf"] then
+ -- deva
+ devanagari.reph = true
+ elseif features["blwf"] then
+ -- deva
+ devanagari.vattu = true
+ -- dev2
+ for i=1,nofsteps do
+ local step = steps[i]
+ local coverage = step.coverage
+ if coverage then
+ for k, v in next, coverage do
+ if not blwfcache[k] then
+ blwfcache[k] = v
+ end
+ end
+ end
+ end
+ end
+ for kind, spec in next, features do -- a sequence can carry several valid shaping features
+ if valid[kind] then
+ for i=1,nofsteps do
+ local step = steps[i]
+ local coverage = step.coverage
+ if coverage then
+ local reph = false
+ local chain = sequence.chain or 0 -- assumption: chain info travels with the sequence
+ if chain ~= 0 then -- rphf is the result of a chain
+ -- rphf might be result of other handler/chainproc
+ else
+ -- rphf acts on consonant + halant
+ for k, v in next, ra do
+ local r = coverage[k]
+ if r then
+ local h = false
+ for k, v in next, halant do
+ local h = r[k]
+ if h then
+ reph = h.ligature or false
+ break
+ end
+ end
+ if reph then
+ break
+ end
+ end
+ end
+ end
+ seqsubset[#seqsubset+1] = { kind, coverage, reph }
+ end
+ end
+ end
+ if kind == "pref" then
+ local sequence = dataset[5]
+ local steps = sequence.steps
+ local nofsteps = sequence.nofsteps
+ for i=1,nofsteps do
+ local step = steps[i]
+ local coverage = step.coverage
+ if coverage then
+ for k, v in next, halant do
+ local h = coverage[k]
+ if h then
+ local found = false
+ for k, v in next, h do
+ found = v and v.ligature
+ if found then
+ pre_base_reordering_consonants[k] = found
+ break
+ end
+ end
+ if found then
+ break
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ --
+ if script == "deva" then
+ sharedfeatures["dv04"] = true -- dv04_remove_joiners
+ elseif script == "dev2" then
+ sharedfeatures["dv01"] = true -- dv01_reorder_matras
+ sharedfeatures["dv02"] = true -- dv02_reorder_reph
+ sharedfeatures["dv03"] = true -- dv03_reorder_pre_base_reordering_consonants
+ sharedfeatures["dv04"] = true -- dv04_remove_joiners
+ elseif script == "mlym" then
+ sharedfeatures["pstf"] = true
+ elseif script == "mlm2" then
+ sharedfeatures["pstf"] = true
+ sharedfeatures["pref"] = true
+ sharedfeatures["dv03"] = true -- dv03_reorder_pre_base_reordering_consonants
+ gsubfeatures ["dv03"] = dev2_defaults -- reorder pre base reordering consonants
+ insert(sequences,insertindex,sequence_reorder_pre_base_reordering_consonants)
+ end
+ end
+ end
+end
+
+registerotffeature {
+ name = "devanagari",
+ description = "inject additional features",
+ default = true,
+ initializers = {
+ node = initializedevanagi,
+ },
+}
+
+-- hm, this is applied to one character:
+
+local function deva_initialize(font,attr) -- we need a proper hook into the dataset initializer
+
+ local tfmdata = fontdata[font]
+ local datasets = otf.dataset(tfmdata,font,attr) -- don't we know this one?
+ local devanagaridata = datasets.devanagari
+
+ if not devanagaridata then
+
+ devanagaridata = {
+ reph = false,
+ vattu = false,
+ blwfcache = { },
+ }
+ datasets.devanagari = devanagaridata
+ local resources = tfmdata.resources
+ local devanagari = resources.devanagari
+
+ for s=1,#datasets do
+ local dataset = datasets[s]
+ if dataset and dataset[1] then -- value
+ local kind = dataset[4]
+ if kind == "rphf" then
+ -- deva
+ devanagaridata.reph = true
+ elseif kind == "blwf" then
+ -- deva
+ devanagaridata.vattu = true
+ -- dev2
+ devanagaridata.blwfcache = devanagari.blwfcache
+ end
+ end
+ end
+
+ end
+
+ return devanagaridata.reph, devanagaridata.vattu, devanagaridata.blwfcache
+
+end
+
+local function deva_reorder(head,start,stop,font,attr,nbspaces)
+
+ local reph, vattu, blwfcache = deva_initialize(font,attr) -- todo: a hash[font]
+
+ local current = start
+ local n = getnext(start)
+ local base = nil
+ local firstcons = nil
+ local lastcons = nil
+ local basefound = false
+
+ if ra[getchar(start)] and halant[getchar(n)] and reph then
+ -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph
+ -- from candidates for base consonants
+ if n == stop then
+ return head, stop, nbspaces
+ end
+ if getchar(getnext(n)) == c_zwj then
+ current = start
+ else
+ current = getnext(n)
+ setprop(start,a_state,s_rphf)
+ end
+ end
+
+ if getchar(current) == c_nbsp then
+ -- Stand Alone cluster
+ if current == stop then
+ stop = getprev(stop)
+ head = remove_node(head,current)
+ free_node(current)
+ return head, stop, nbspaces
+ else
+ nbspaces = nbspaces + 1
+ base = current
+ firstcons = current
+ lastcons = current
+ current = getnext(current)
+ if current ~= stop then
+ if nukta[getchar(current)] then
+ current = getnext(current)
+ end
+ if getchar(current) == c_zwj then
+ if current ~= stop then
+ local next = getnext(current)
+ if next ~= stop and halant[getchar(next)] then
+ current = next
+ next = getnext(current)
+ local tmp = next and getnext(next) or nil -- needs checking
+ local changestop = next == stop
+ local tempcurrent = copy_node(next)
+ copyinjection(tempcurrent,next)
+ local nextcurrent = copy_node(current)
+ copyinjection(nextcurrent,current) -- KE: necessary? HH: probably not as positioning comes later and we rawget/set
+ setfield(tempcurrent,"next",nextcurrent)
+ setfield(nextcurrent,"prev",tempcurrent)
+ setprop(tempcurrent,a_state,s_blwf)
+ tempcurrent = processcharacters(tempcurrent,font)
+ setprop(tempcurrent,a_state,unsetvalue)
+ if getchar(next) == getchar(tempcurrent) then
+ flush_list(tempcurrent)
+ local n = copy_node(current)
+ copyinjection(n,current) -- KE: necessary? HH: probably not as positioning comes later and we rawget/set
+ setfield(current,"char",dotted_circle)
+ head = insert_node_after(head, current, n)
+ else
+ setfield(current,"char",getchar(tempcurrent)) -- we assumes that the result of blwf consists of one node
+ local freenode = getnext(current)
+ setfield(current,"next",tmp)
+ if tmp then
+ setfield(tmp,"prev",current)
+ end
+ free_node(freenode)
+ flush_list(tempcurrent)
+ if changestop then
+ stop = current
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ while not basefound do
+ -- find base consonant
+ if consonant[getchar(current)] then
+ setprop(current,a_state,s_half)
+ if not firstcons then
+ firstcons = current
+ end
+ lastcons = current
+ if not base then
+ base = current
+ elseif blwfcache[getchar(current)] then
+ -- consonant has below-base (or post-base) form
+ setprop(current,a_state,s_blwf)
+ else
+ base = current
+ end
+ end
+ basefound = current == stop
+ current = getnext(current)
+ end
+
+ if base ~= lastcons then
+ -- if base consonant is not last one then move halant from base consonant to last one
+ local np = base
+ local n = getnext(base)
+ if nukta[getchar(n)] then
+ np = n
+ n = getnext(n)
+ end
+ if halant[getchar(n)] then
+ if lastcons ~= stop then
+ local ln = getnext(lastcons)
+ if nukta[getchar(ln)] then
+ lastcons = ln
+ end
+ end
+ -- local np = getprev(n)
+ local nn = getnext(n)
+ local ln = getnext(lastcons) -- what if lastcons is nn ?
+ setfield(np,"next",nn)
+ setfield(nn,"prev",np)
+ setfield(lastcons,"next",n)
+ if ln then
+ setfield(ln,"prev",n)
+ end
+ setfield(n,"next",ln)
+ setfield(n,"prev",lastcons)
+ if lastcons == stop then
+ stop = n
+ end
+ end
+ end
+
+ n = getnext(start)
+ if n ~= stop and ra[getchar(start)] and halant[getchar(n)] and not zw_char[getchar(getnext(n))] then
+ -- if syllable starts with Ra + H then move this combination so that it follows either:
+ -- the post-base 'matra' (if any) or the base consonant
+ local matra = base
+ if base ~= stop then
+ local next = getnext(base)
+ if dependent_vowel[getchar(next)] then
+ matra = next
+ end
+ end
+ -- [sp][start][n][nn] [matra|base][?]
+ -- [matra|base][start] [n][?] [sp][nn]
+ local sp = getprev(start)
+ local nn = getnext(n)
+ local mn = getnext(matra)
+ if sp then
+ setfield(sp,"next",nn)
+ end
+ setfield(nn,"prev",sp)
+ setfield(matra,"next",start)
+ setfield(start,"prev",matra)
+ setfield(n,"next",mn)
+ if mn then
+ setfield(mn,"prev",n)
+ end
+ if head == start then
+ head = nn
+ end
+ start = nn
+ if matra == stop then
+ stop = n
+ end
+ end
+
+ local current = start
+ while current ~= stop do
+ local next = getnext(current)
+ if next ~= stop and halant[getchar(next)] and getchar(getnext(next)) == c_zwnj then
+ setprop(current,a_state,unsetvalue)
+ end
+ current = next
+ end
+
+ if base ~= stop and getprop(base,a_state) then
+ local next = getnext(base)
+ if halant[getchar(next)] and not (next ~= stop and getchar(getnext(next)) == c_zwj) then
+ setprop(base,a_state,unsetvalue)
+ end
+ end
+
+ -- ToDo: split two- or three-part matras into their parts. Then, move the left 'matra' part to the beginning of the syllable.
+ -- Not necessary for Devanagari. However it is necessary for other scripts, such as Tamil (e.g. TAMIL VOWEL SIGN O - 0BCA)
+
+ -- classify consonants and 'matra' parts as pre-base, above-base (Reph), below-base or post-base, and group elements of the syllable (consonants and 'matras') according to this classification
+
+ local current, allreordered, moved = start, false, { [base] = true }
+ local a, b, p, bn = base, base, base, getnext(base)
+ if base ~= stop and nukta[getchar(bn)] then
+ a, b, p = bn, bn, bn
+ end
+ while not allreordered do
+ -- current is always consonant
+ local c = current
+ local n = getnext(current)
+ local l = nil -- used ?
+ if c ~= stop then
+ if nukta[getchar(n)] then
+ c = n
+ n = getnext(n)
+ end
+ if c ~= stop then
+ if halant[getchar(n)] then
+ c = n
+ n = getnext(n)
+ end
+ while c ~= stop and dependent_vowel[getchar(n)] do
+ c = n
+ n = getnext(n)
+ end
+ if c ~= stop then
+ if vowel_modifier[getchar(n)] then
+ c = n
+ n = getnext(n)
+ end
+ if c ~= stop and stress_tone_mark[getchar(n)] then
+ c = n
+ n = getnext(n)
+ end
+ end
+ end
+ end
+ local bp = getprev(firstcons)
+ local cn = getnext(current)
+ local last = getnext(c)
+ while cn ~= last do
+ -- move pre-base matras...
+ if pre_mark[getchar(cn)] then
+ if bp then
+ setfield(bp,"next",cn)
+ end
+ local next = getnext(cn)
+ local prev = getprev(cn)
+ if next then
+ setfield(next,"prev",prev)
+ end
+ setfield(prev,"next",next)
+ if cn == stop then
+ stop = prev
+ end
+ setfield(cn,"prev",bp)
+ setfield(cn,"next",firstcons)
+ setfield(firstcons,"prev",cn)
+ if firstcons == start then
+ if head == start then
+ head = cn
+ end
+ start = cn
+ end
+ break
+ end
+ cn = getnext(cn)
+ end
+ allreordered = c == stop
+ current = getnext(c)
+ end
+
+ if reph or vattu then
+ local current, cns = start, nil
+ while current ~= stop do
+ local c = current
+ local n = getnext(current)
+ if ra[getchar(current)] and halant[getchar(n)] then
+ c = n
+ n = getnext(n)
+ local b, bn = base, base
+ while bn ~= stop do
+ local next = getnext(bn)
+ if dependent_vowel[getchar(next)] then
+ b = next
+ end
+ bn = next
+ end
+ if getprop(current,a_state) == s_rphf then
+ -- position Reph (Ra + H) after post-base 'matra' (if any) since these
+ -- become marks on the 'matra', not on the base glyph
+ if b ~= current then
+ if current == start then
+ if head == start then
+ head = n
+ end
+ start = n
+ end
+ if b == stop then
+ stop = c
+ end
+ local prev = getprev(current)
+ if prev then
+ setfield(prev,"next",n)
+ end
+ if n then
+ setfield(n,"prev",prev)
+ end
+ local next = getnext(b)
+ setfield(c,"next",next)
+ if next then
+ setfield(next,"prev",c)
+ end
+ setfield(c,"next",next)
+ setfield(b,"next",current)
+ setfield(current,"prev",b)
+ end
+ elseif cns and getnext(cns) ~= current then -- todo: optimize next
+ -- position below-base Ra (vattu) following the consonants on which it is placed (either the base consonant or one of the pre-base consonants)
+ local cp, cnsn = getprev(current), getnext(cns)
+ if cp then
+ setfield(cp,"next",n)
+ end
+ if n then
+ setfield(n,"prev",cp)
+ end
+ setfield(cns,"next",current)
+ setfield(current,"prev",cns)
+ setfield(c,"next",cnsn)
+ if cnsn then
+ setfield(cnsn,"prev",c)
+ end
+ if c == stop then
+ stop = cp
+ break
+ end
+ current = getprev(n)
+ end
+ else
+ local char = getchar(current)
+ if consonant[char] then
+ cns = current
+ local next = getnext(cns)
+ if halant[getchar(next)] then
+ cns = next
+ end
+ elseif char == c_nbsp then
+ nbspaces = nbspaces + 1
+ cns = current
+ local next = getnext(cns)
+ if halant[getchar(next)] then
+ cns = next
+ end
+ end
+ end
+ current = getnext(current)
+ end
+ end
+
+ if getchar(base) == c_nbsp then
+ nbspaces = nbspaces - 1
+ head = remove_node(head,base)
+ free_node(base)
+ end
+
+ return head, stop, nbspaces
+end
+
+-- If a pre-base matra character had been reordered before applying basic features,
+-- the glyph can be moved closer to the main consonant based on whether half-forms had been formed.
+-- Actual position for the matra is defined as “after last standalone halant glyph,
+-- after initial matra position and before the main consonant”.
+-- If ZWJ or ZWNJ follow this halant, position is moved after it.
+
+-- so we break out ... this is only done for the first 'word' (if we feed words we can as
+ -- well test for non glyphs).
+
+function handlers.devanagari_reorder_matras(head,start) -- no leak
+ local current = start -- we could cache attributes here
+ local startfont = getfont(start)
+ local startattr = getprop(start,a_syllabe)
+ -- can be fast loop
+ while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do
+ local next = getnext(current)
+ if halant[getchar(current)] and not getprop(current,a_state) then
+ if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getprop(next,a_syllabe) == startattr and zw_char[getchar(next)] then
+ current = next
+ end
+ local startnext = getnext(start)
+ head = remove_node(head,start)
+ local next = getnext(current)
+ if next then
+ setfield(next,"prev",start)
+ end
+ setfield(start,"next",next)
+ setfield(current,"next",start)
+ setfield(start,"prev",current)
+ start = startnext
+ break
+ end
+ current = next
+ end
+ return head, start, true
+end
+
+-- todo: way more caching of attributes and font
+
+ -- Reph’s original position is always at the beginning of the syllable (i.e. it is not reordered at the character reordering stage).
+-- However, it will be reordered according to the basic-forms shaping results.
+-- Possible positions for reph, depending on the script, are; after main, before post-base consonant forms,
+-- and after post-base consonant forms.
+
+-- 1 If reph should be positioned after post-base consonant forms, proceed to step 5.
+-- 2 If the reph repositioning class is not after post-base: target position is after the first explicit halant glyph between
+-- the first post-reph consonant and last main consonant. If ZWJ or ZWNJ are following this halant, position is moved after it.
+-- If such position is found, this is the target position. Otherwise, proceed to the next step.
+-- Note: in old-implementation fonts, where classifications were fixed in shaping engine,
+-- there was no case where reph position will be found on this step.
+ -- 3 If reph should be repositioned after the main consonant: find the first consonant not ligated with main,
+-- or find the first consonant that is not a potential pre-base reordering Ra.
+-- 4 If reph should be positioned before post-base consonant, find first post-base classified consonant not ligated with main.
+-- If no consonant is found, the target position should be before the first matra, syllable modifier sign or vedic sign.
+-- 5 If no consonant is found in steps 3 or 4, move reph to a position immediately before the first post-base matra,
+-- syllable modifier sign or vedic sign that has a reordering class after the intended reph position.
+-- For example, if the reordering position for reph is post-main, it will skip above-base matras that also have a post-main position.
+-- 6 Otherwise, reorder reph to the end of the syllable.
+
+-- hm, this only looks at the start of a nodelist ... is this supposed to be line based?
+
+function handlers.devanagari_reorder_reph(head,start)
+ -- since in Devanagari reph has reordering position 'before postscript' dev2 only follows step 2, 4, and 6,
+ -- the other steps are still ToDo (required for scripts other than dev2)
+ local current = getnext(start)
+ local startnext = nil
+ local startprev = nil
+ local startfont = getfont(start)
+ local startattr = getprop(start,a_syllabe)
+ while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do --step 2
+ if halant[getchar(current)] and not getprop(current,a_state) then
+ local next = getnext(current)
+ if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getprop(next,a_syllabe) == startattr and zw_char[getchar(next)] then
+ current = next
+ end
+ startnext = getnext(start)
+ head = remove_node(head,start)
+ local next = getnext(current)
+ if next then
+ setfield(next,"prev",start)
+ end
+ setfield(start,"next",next)
+ setfield(current,"next",start)
+ setfield(start,"prev",current)
+ start = startnext
+ startattr = getprop(start,a_syllabe)
+ break
+ end
+ current = getnext(current)
+ end
+ if not startnext then
+ current = getnext(start)
+ while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do --step 4
+ if getprop(current,a_state) == s_pstf then --post-base
+ startnext = getnext(start)
+ head = remove_node(head,start)
+ local prev = getprev(current)
+ setfield(start,"prev",prev)
+ setfield(prev,"next",start)
+ setfield(start,"next",current)
+ setfield(current,"prev",start)
+ start = startnext
+ startattr = getprop(start,a_syllabe)
+ break
+ end
+ current = getnext(current)
+ end
+ end
+ -- ToDo: determine position for reph with reordering position other than 'before postscript'
+ -- (required for scripts other than dev2)
+ -- leaks
+ if not startnext then
+ current = getnext(start)
+ local c = nil
+ while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do --step 5
+ if not c then
+ local char = getchar(current)
+ -- todo: combine in one
+ if mark_above_below_post[char] and reorder_class[char] ~= "after subscript" then
+ c = current
+ end
+ end
+ current = getnext(current)
+ end
+ -- here we can lose the old start node: maybe best split cases
+ if c then
+ startnext = getnext(start)
+ head = remove_node(head,start)
+ local prev = getprev(c)
+ setfield(start,"prev",prev)
+ setfield(prev,"next",start)
+ setfield(start,"next",c)
+ setfield(c,"prev",start)
+ -- end
+ start = startnext
+ startattr = getprop(start,a_syllabe)
+ end
+ end
+ -- leaks
+ if not startnext then
+ current = start
+ local next = getnext(current)
+ while next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getprop(next,a_syllabe) == startattr do --step 6
+ current = next
+ next = getnext(current)
+ end
+ if start ~= current then
+ startnext = getnext(start)
+ head = remove_node(head,start)
+ local next = getnext(current)
+ if next then
+ setfield(next,"prev",start)
+ end
+ setfield(start,"next",next)
+ setfield(current,"next",start)
+ setfield(start,"prev",current)
+ start = startnext
+ end
+ end
+ --
+ return head, start, true
+end
+
+-- we can cache some checking (v)
+
+-- If a pre-base reordering consonant is found, reorder it according to the following rules:
+--
+-- 1 Only reorder a glyph produced by substitution during application of the feature.
+-- (Note that a font may shape a Ra consonant with the feature generally but block it in certain contexts.)
+-- 2 Try to find a target position the same way as for pre-base matra. If it is found, reorder pre-base consonant glyph.
+-- 3 If position is not found, reorder immediately before main consonant.
+
+-- UNTESTED: NOT CALLED IN EXAMPLE
+
+function handlers.devanagari_reorder_pre_base_reordering_consonants(head,start)
+ local current = start
+ local startnext = nil
+ local startprev = nil
+ local startfont = getfont(start)
+ local startattr = getprop(start,a_syllabe)
+ -- can be fast for loop + caching state
+ while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do
+ local next = getnext(current)
+ if halant[getchar(current)] and not getprop(current,a_state) then
+ if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getprop(next,a_syllabe) == startattr then
+ local char = getchar(next)
+ if char == c_zwnj or char == c_zwj then
+ current = next
+ end
+ end
+ startnext = getnext(start)
+ head = remove_node(head,start)
+ local next = getnext(current)
+ if next then
+ setfield(next,"prev",start)
+ end
+ setfield(start,"next",next)
+ setfield(current,"next",start)
+ setfield(start,"prev",current)
+ start = startnext
+ break
+ end
+ current = next
+ end
+ if not startnext then
+ current = getnext(start)
+ startattr = getprop(start,a_syllabe)
+ while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do
+ if not consonant[getchar(current)] and getprop(current,a_state) then --main
+ startnext = getnext(start)
+ head = remove_node(head,start)
+ local prev = getprev(current)
+ setfield(start,"prev",prev)
+ setfield(prev,"next",start)
+ setfield(start,"next",current)
+ setfield(current,"prev",start)
+ start = startnext
+ break
+ end
+ current = getnext(current)
+ end
+ end
+ return head, start, true
+end
+
+function handlers.devanagari_remove_joiners(head,start)
+ local stop = getnext(start)
+ local startfont = getfont(start)
+ while stop and getid(stop) == glyph_code and getsubtype(stop) < 256 and getfont(stop) == startfont do
+ local char = getchar(stop)
+ if char == c_zwnj or char == c_zwj then
+ stop = getnext(stop)
+ else
+ break
+ end
+ end
+ if stop then
+ setfield(getfield(stop,"prev"),"next",nil)
+ setfield(stop,"prev",getprev(start))
+ end
+ local prev = getprev(start)
+ if prev then
+ setfield(prev,"next",stop)
+ end
+ if head == start then
+ head = stop
+ end
+ flush_list(start)
+ return head, stop, true
+end
+
+local function dev2_initialize(font,attr)
+
+ local devanagari = fontdata[font].resources.devanagari
+
+ if devanagari then
+ return devanagari.seqsubset or { }, devanagari.reorderreph or { }
+ else
+ return { }, { }
+ end
+
+end
+
+ -- this one will be merged into the caller: it saves a call, but we will then make functions
+-- of the actions
+
+local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pass over (determine stop in sweep)
+
+ local seqsubset, reorderreph = dev2_initialize(font,attr)
+
+ local reph = false -- was nil ... probably went unnoticed because never assigned
+ local halfpos = nil
+ local basepos = nil
+ local subpos = nil
+ local postpos = nil
+ local locl = { }
+
+ for i=1,#seqsubset do
+
+ -- maybe quit if start == stop
+
+ local subset = seqsubset[i]
+ local kind = subset[1]
+ local lookupcache = subset[2]
+ if kind == "rphf" then
+ for k, v in next, ra do
+ local r = lookupcache[k]
+ if r then
+ for k, v in next, halant do
+ local h = r[k]
+ if h then
+ reph = h.ligature or false
+ break
+ end
+ end
+ if reph then
+ break
+ end
+ end
+ end
+ local current = start
+ local last = getnext(stop)
+ while current ~= last do
+ if current ~= stop then
+ local c = locl[current] or getchar(current)
+ local found = lookupcache[c]
+ if found then
+ local next = getnext(current)
+ local n = locl[next] or getchar(next)
+ if found[n] then --above-base: rphf Consonant + Halant
+ local afternext = next ~= stop and getnext(next)
+ if afternext and zw_char[getchar(afternext)] then -- ZWJ and ZWNJ prevent creation of reph
+ current = next
+ current = getnext(current)
+ elseif current == start then
+ setprop(current,a_state,s_rphf)
+ current = next
+ else
+ current = next
+ end
+ end
+ end
+ end
+ current = getnext(current)
+ end
+ elseif kind == "pref" then
+ local current = start
+ local last = getnext(stop)
+ while current ~= last do
+ if current ~= stop then
+ local c = locl[current] or getchar(current)
+ local found = lookupcache[c]
+ if found then
+ local next = getnext(current)
+ local n = locl[next] or getchar(next)
+ if found[n] then
+ setprop(current,a_state,s_pref)
+ setprop(next,a_state,s_pref)
+ current = next
+ end
+ end
+ end
+ current = getnext(current)
+ end
+ elseif kind == "half" then -- half forms: half / Consonant + Halant
+ local current = start
+ local last = getnext(stop)
+ while current ~= last do
+ if current ~= stop then
+ local c = locl[current] or getchar(current)
+ local found = lookupcache[c]
+ if found then
+ local next = getnext(current)
+ local n = locl[next] or getchar(next)
+ if found[n] then
+ if next ~= stop and getchar(getnext(next)) == c_zwnj then -- zwnj prevent creation of half
+ current = next
+ else
+ setprop(current,a_state,s_half)
+ if not halfpos then
+ halfpos = current
+ end
+ end
+ current = getnext(current)
+ end
+ end
+ end
+ current = getnext(current)
+ end
+ elseif kind == "blwf" then -- below-base: blwf / Halant + Consonant
+ local current = start
+ local last = getnext(stop)
+ while current ~= last do
+ if current ~= stop then
+ local c = locl[current] or getchar(current)
+ local found = lookupcache[c]
+ if found then
+ local next = getnext(current)
+ local n = locl[next] or getchar(next)
+ if found[n] then
+ setprop(current,a_state,s_blwf)
+ setprop(next,a_state,s_blwf)
+ current = next
+ subpos = current
+ end
+ end
+ end
+ current = getnext(current)
+ end
+ elseif kind == "pstf" then -- post-base: pstf / Halant + Consonant
+ local current = start
+ local last = getnext(stop)
+ while current ~= last do
+ if current ~= stop then
+ local c = locl[current] or getchar(current)
+ local found = lookupcache[c]
+ if found then
+ local next = getnext(current)
+ local n = locl[next] or getchar(next)
+ if found[n] then
+ setprop(current,a_state,s_pstf)
+ setprop(next,a_state,s_pstf)
+ current = next
+ postpos = current
+ end
+ end
+ end
+ current = getnext(current)
+ end
+ end
+ end
+
+ -- this one changes per word ...
+
+ reorderreph.coverage = { [reph] = true } -- neat
+
+ -- end of weird
+
+ local current, base, firstcons = start, nil, nil
+
+ if getprop(start,a_state) == s_rphf then
+ -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
+ current = getnext(getnext(start))
+ end
+
+ if current ~= getnext(stop) and getchar(current) == c_nbsp then
+ -- Stand Alone cluster
+ if current == stop then
+ stop = getprev(stop)
+ head = remove_node(head,current)
+ free_node(current)
+ return head, stop, nbspaces
+ else
+ nbspaces = nbspaces + 1
+ base = current
+ current = getnext(current)
+ if current ~= stop then
+ local char = getchar(current)
+ if nukta[char] then
+ current = getnext(current)
+ char = getchar(current)
+ end
+ if char == c_zwj then
+ local next = getnext(current)
+ if current ~= stop and next ~= stop and halant[getchar(next)] then
+ current = next
+ next = getnext(current)
+ local tmp = getnext(next)
+ local changestop = next == stop
+ setfield(next,"next",nil)
+ setprop(current,a_state,s_pref)
+ current = processcharacters(current,font)
+ setprop(current,a_state,s_blwf)
+ current = processcharacters(current,font)
+ setprop(current,a_state,s_pstf)
+ current = processcharacters(current,font)
+ setprop(current,a_state,unsetvalue)
+ if halant[getchar(current)] then
+ setfield(getnext(current),"next",tmp)
+ local nc = copy_node(current)
+ copyinjection(nc,current)
+ setfield(current,"char",dotted_circle)
+ head = insert_node_after(head,current,nc)
+ else
+ setfield(current,"next",tmp) -- assumes that result of pref, blwf, or pstf consists of one node
+ if changestop then
+ stop = current
+ end
+ end
+ end
+ end
+ end
+ end
+ else -- not Stand Alone cluster
+ local last = getnext(stop)
+ while current ~= last do -- find base consonant
+ local next = getnext(current)
+ if consonant[getchar(current)] then
+ if not (current ~= stop and next ~= stop and halant[getchar(next)] and getchar(getnext(next)) == c_zwj) then
+ if not firstcons then
+ firstcons = current
+ end
+ -- check whether consonant has below-base or post-base form or is pre-base reordering Ra
+ local a = getprop(current,a_state)
+ if not (a == s_pref or a == s_blwf or a == s_pstf) then
+ base = current
+ end
+ end
+ end
+ current = next
+ end
+ if not base then
+ base = firstcons
+ end
+ end
+
+ if not base then
+ if getprop(start,a_state) == s_rphf then
+ setprop(start,a_state,unsetvalue)
+ end
+ return head, stop, nbspaces
+ else
+ if getprop(base,a_state) then
+ setprop(base,a_state,unsetvalue)
+ end
+ basepos = base
+ end
+ if not halfpos then
+ halfpos = base
+ end
+ if not subpos then
+ subpos = base
+ end
+ if not postpos then
+ postpos = subpos or base
+ end
+
+ -- Matra characters are classified and reordered by which consonant in a conjunct they have affinity for
+
+ local moved = { }
+ local current = start
+ local last = getnext(stop)
+ while current ~= last do
+ local char, target, cn = locl[current] or getchar(current), nil, getnext(current)
+ -- not so efficient (needed for malayalam)
+ local tpm = twopart_mark[char]
+ if tpm then
+ local extra = copy_node(current)
+ copyinjection(extra,current)
+ char = tpm[1]
+ setfield(current,"char",char)
+ setfield(extra,"char",tpm[2])
+ head = insert_node_after(head,current,extra)
+ end
+ --
+ if not moved[current] and dependent_vowel[char] then
+ if pre_mark[char] then -- Before first half form in the syllable
+ moved[current] = true
+ local prev = getprev(current)
+ local next = getnext(current)
+ if prev then
+ setfield(prev,"next",next)
+ end
+ if next then
+ setfield(next,"prev",prev)
+ end
+ if current == stop then
+ stop = getprev(current)
+ end
+ if halfpos == start then
+ if head == start then
+ head = current
+ end
+ start = current
+ end
+ local prev = getprev(halfpos)
+ if prev then
+ setfield(prev,"next",current)
+ end
+ setfield(current,"prev",prev)
+ setfield(halfpos,"prev",current)
+ setfield(current,"next",halfpos)
+ halfpos = current
+ elseif above_mark[char] then -- After main consonant
+ target = basepos
+ if subpos == basepos then
+ subpos = current
+ end
+ if postpos == basepos then
+ postpos = current
+ end
+ basepos = current
+ elseif below_mark[char] then -- After subjoined consonants
+ target = subpos
+ if postpos == subpos then
+ postpos = current
+ end
+ subpos = current
+ elseif post_mark[char] then -- After post-form consonant
+ target = postpos
+ postpos = current
+ end
+ if mark_above_below_post[char] then
+ local prev = getprev(current)
+ if prev ~= target then
+ local next = getnext(current)
+ if prev then -- not needed, already tested with target
+ setfield(prev,"next",next)
+ end
+ if next then
+ setfield(next,"prev",prev)
+ end
+ if current == stop then
+ stop = prev
+ end
+ local next = getnext(target)
+ if next then
+ setfield(next,"prev",current)
+ end
+ setfield(current,"next",next)
+ setfield(target,"next",current)
+ setfield(current,"prev",target)
+ end
+ end
+ end
+ current = cn
+ end
+
+ -- Reorder marks to canonical order: Adjacent nukta and halant or nukta and vedic sign are always repositioned if necessary, so that the nukta is first.
+
+ local current, c = start, nil
+ while current ~= stop do
+ local char = getchar(current)
+ if halant[char] or stress_tone_mark[char] then
+ if not c then
+ c = current
+ end
+ else
+ c = nil
+ end
+ local next = getnext(current)
+ if c and nukta[getchar(next)] then
+ if head == c then
+ head = next
+ end
+ if stop == next then
+ stop = current
+ end
+ local prev = getprev(c)
+ if prev then
+ setfield(prev,"next",next)
+ end
+ setfield(next,"prev",prev)
+ local nextnext = getnext(next)
+ setfield(current,"next",nextnext)
+ local nextnextnext = getnext(nextnext)
+ if nextnextnext then
+ setfield(nextnextnext,"prev",current)
+ end
+ setfield(c,"prev",nextnext)
+ setfield(nextnext,"next",c)
+ end
+ if stop == current then break end
+ current = getnext(current)
+ end
+
+ if getchar(base) == c_nbsp then
+ nbspaces = nbspaces - 1
+ head = remove_node(head, base)
+ free_node(base)
+ end
+
+ return head, stop, nbspaces
+end
+
+-- cleaned up and optimized ... needs checking (local, check order, fixes, extra hash, etc)
+
+local separator = { }
+
+imerge(separator,consonant)
+imerge(separator,independent_vowel)
+imerge(separator,dependent_vowel)
+imerge(separator,vowel_modifier)
+imerge(separator,stress_tone_mark)
+
+for k, v in next, nukta do separator[k] = true end
+for k, v in next, halant do separator[k] = true end
+
+local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowel
+ -- why two variants ... the comment suggests that it's the same ruleset
+ local n = getnext(c)
+ if not n then
+ return c
+ end
+ if variant == 1 then
+ local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if v and nukta[getchar(n)] then
+ n = getnext(n)
+ if n then
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ end
+ end
+ if n and v then
+ local nn = getnext(n)
+ if nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font then
+ local nnn = getnext(nn)
+ if nnn and getid(nnn) == glyph_code and getsubtype(nnn) < 256 and getfont(nnn) == font then
+ local nnc = getchar(nn)
+ local nnnc = getchar(nnn)
+ if nnc == c_zwj and consonant[nnnc] then
+ c = nnn
+ elseif (nnc == c_zwnj or nnc == c_zwj) and halant[nnnc] then
+ local nnnn = getnext(nnn)
+ if nnnn and getid(nnnn) == glyph_code and consonant[getchar(nnnn)] and getsubtype(nnnn) < 256 and getfont(nnnn) == font then
+ c = nnnn
+ end
+ end
+ end
+ end
+ end
+ elseif variant == 2 then
+ if getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ c = n
+ end
+ n = getnext(c)
+ if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
+ local nn = getnext(n)
+ if nn then
+ local nv = getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font
+ if nv and zw_char[getchar(n)] then
+ n = nn
+ nn = getnext(nn)
+ nv = nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font
+ end
+ if nv and halant[getchar(n)] and consonant[getchar(nn)] then
+ c = nn
+ end
+ end
+ end
+ end
+ -- c = ms_matra(c)
+ local n = getnext(c)
+ if not n then
+ return c
+ end
+ local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ local char = getchar(n)
+ if dependent_vowel[char] then
+ c = getnext(c)
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ end
+ if nukta[char] then
+ c = getnext(c)
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ end
+ if halant[char] then
+ c = getnext(c)
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ end
+ if vowel_modifier[char] then
+ c = getnext(c)
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ end
+ if stress_tone_mark[char] then
+ c = getnext(c)
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ end
+ if stress_tone_mark[char] then
+ return n
+ else
+ return c
+ end
+end
+
+local function analyze_next_chars_two(c,font)
+ local n = getnext(c)
+ if not n then
+ return c
+ end
+ if getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ c = n
+ end
+ n = c
+ while true do
+ local nn = getnext(n)
+ if nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font then
+ local char = getchar(nn)
+ if halant[char] then
+ n = nn
+ local nnn = getnext(nn)
+ if nnn and getid(nnn) == glyph_code and zw_char[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
+ n = nnn
+ end
+ elseif char == c_zwnj or char == c_zwj then
+ -- n = nn -- not here (?)
+ local nnn = getnext(nn)
+ if nnn and getid(nnn) == glyph_code and halant[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
+ n = nnn
+ end
+ else
+ break
+ end
+ local nn = getnext(n)
+ if nn and getid(nn) == glyph_code and consonant[getchar(nn)] and getsubtype(nn) < 256 and getfont(nn) == font then
+ n = nn
+ local nnn = getnext(nn)
+ if nnn and getid(nnn) == glyph_code and nukta[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
+ n = nnn
+ end
+ c = n
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ --
+ if not c then
+ -- This shouldn't happen I guess.
+ return
+ end
+ local n = getnext(c)
+ if not n then
+ return c
+ end
+ local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ local char = getchar(n)
+ if char == c_anudatta then
+ c = n
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ end
+ if halant[char] then
+ c = getnext(c)
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ if char == c_zwnj or char == c_zwj then
+ c = getnext(c)
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ end
+ else
+ -- c = ms_matra(c)
+ -- same as one
+ if dependent_vowel[char] then
+ c = getnext(c)
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ end
+ if nukta[char] then
+ c = getnext(c)
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ end
+ if halant[char] then
+ c = getnext(c)
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ end
+ end
+ -- same as one
+ if vowel_modifier[char] then
+ c = getnext(c)
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ end
+ if stress_tone_mark[char] then
+ c = getnext(c)
+ n = getnext(c)
+ if not n then
+ return c
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ return c
+ end
+ char = getchar(n)
+ end
+ if stress_tone_mark[char] then
+ return n
+ else
+ return c
+ end
+end
+
+local function inject_syntax_error(head,current,mark)
+ local signal = copy_node(current)
+ copyinjection(signal,current)
+ if mark == pre_mark then -- THIS IS WRONG: pre_mark is a table
+ setfield(signal,"char",dotted_circle)
+ else
+ setfield(current,"char",dotted_circle)
+ end
+ return insert_node_after(head,current,signal)
+end
+
+-- It looks like these two analyzers were written independently but they share
+-- a lot. Common code has been synced.
+
+function methods.deva(head,font,attr)
+ head = tonut(head)
+ local current = head
+ local start = true
+ local done = false
+ local nbspaces = 0
+ while current do
+ if getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font then
+ done = true
+ local syllablestart = current
+ local syllableend = nil
+ local c = current
+ local n = getnext(c)
+ if n and ra[getchar(c)] and getid(n) == glyph_code and halant[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ local n = getnext(n)
+ if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
+ c = n
+ end
+ end
+ local standalone = getchar(c) == c_nbsp
+ if standalone then
+ local prev = getprev(current)
+ if not prev then
+ -- begin of paragraph or box
+ elseif getid(prev) ~= glyph_code or getsubtype(prev) >= 256 or getfont(prev) ~= font then
+ -- different font or language so quite certainly a different word
+ elseif not separator[getchar(prev)] then
+ -- something that separates words
+ else
+ standalone = false
+ end
+ end
+ if standalone then
+ -- stand alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
+ local syllableend = analyze_next_chars_one(c,font,2)
+ current = getnext(syllableend)
+ if syllablestart ~= syllableend then
+ head, current, nbspaces = deva_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
+ current = getnext(current)
+ end
+ else
+ -- we can delay the getsubtype(n) and getfont(n) and test for say halant first
+                -- as a table access is faster than two function calls (subtype and font are
+ -- pseudo fields) but the code becomes messy (unless we make it a function)
+ local char = getchar(current)
+ if consonant[char] then
+ -- syllable containing consonant
+ local prevc = true
+ while prevc do
+ prevc = false
+ local n = getnext(current)
+ if not n then
+ break
+ end
+ local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ break
+ end
+ local c = getchar(n)
+ if nukta[c] then
+ n = getnext(n)
+ if not n then
+ break
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ break
+ end
+ c = getchar(n)
+ end
+ if halant[c] then
+ n = getnext(n)
+ if not n then
+ break
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ break
+ end
+ c = getchar(n)
+ if c == c_zwnj or c == c_zwj then
+ n = getnext(n)
+ if not n then
+ break
+ end
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if not v then
+ break
+ end
+ c = getchar(n)
+ end
+ if consonant[c] then
+ prevc = true
+ current = n
+ end
+ end
+ end
+ local n = getnext(current)
+ if n and getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+                        -- nukta (not specified in the Microsoft Devanagari OpenType specification)
+ current = n
+ n = getnext(current)
+ end
+ syllableend = current
+ current = n
+ if current then
+ local v = getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
+ if v then
+ if halant[getchar(current)] then
+ -- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H
+ local n = getnext(current)
+ if n and getid(n) == glyph_code and zw_char[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ -- code collapsed, probably needs checking with intention
+ syllableend = n
+ current = getnext(n)
+ else
+ syllableend = current
+ current = n
+ end
+ else
+ -- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM]
+ local c = getchar(current)
+ if dependent_vowel[c] then
+ syllableend = current
+ current = getnext(current)
+ v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
+ if v then
+ c = getchar(current)
+ end
+ end
+ if v and vowel_modifier[c] then
+ syllableend = current
+ current = getnext(current)
+ v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
+ if v then
+ c = getchar(current)
+ end
+ end
+ if v and stress_tone_mark[c] then
+ syllableend = current
+ current = getnext(current)
+ end
+ end
+ end
+ end
+ if syllablestart ~= syllableend then
+ head, current, nbspaces = deva_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
+ current = getnext(current)
+ end
+ elseif independent_vowel[char] then
+ -- syllable without consonants: VO + [VM] + [SM]
+ syllableend = current
+ current = getnext(current)
+ if current then
+ local v = getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
+ if v then
+ local c = getchar(current)
+ if vowel_modifier[c] then
+ syllableend = current
+ current = getnext(current)
+ v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
+ if v then
+ c = getchar(current)
+ end
+ end
+ if v and stress_tone_mark[c] then
+ syllableend = current
+ current = getnext(current)
+ end
+ end
+ end
+ else
+ local mark = mark_four[char]
+ if mark then
+ head, current = inject_syntax_error(head,current,mark)
+ end
+ current = getnext(current)
+ end
+ end
+ else
+ current = getnext(current)
+ end
+ start = false
+ end
+
+ if nbspaces > 0 then
+ head = replace_all_nbsp(head)
+ end
+
+ head = tonode(head)
+
+ return head, done
+end
+
+-- there is a good chance that when we run into one with subtype < 256 the rest is also done
+-- so maybe we can omit this check (it's pretty hard to get glyphs in the stream out of the blue)
+
+function methods.dev2(head,font,attr)
+ head = tonut(head)
+ local current = head
+ local start = true
+ local done = false
+ local syllabe = 0
+ local nbspaces = 0
+ while current do
+ local syllablestart, syllableend = nil, nil
+ if getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font then
+ done = true
+ syllablestart = current
+ local c = current
+ local n = getnext(current)
+ if n and ra[getchar(c)] and getid(n) == glyph_code and halant[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ local n = getnext(n)
+ if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
+ c = n
+ end
+ end
+ local char = getchar(c)
+ if independent_vowel[char] then
+ -- vowel-based syllable: [Ra+H]+V+[N]+[<[<ZWJ|ZWNJ>]+H+C|ZWJ+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
+ current = analyze_next_chars_one(c,font,1)
+ syllableend = current
+ else
+ local standalone = char == c_nbsp
+ if standalone then
+ nbspaces = nbspaces + 1
+ local p = getprev(current)
+ if not p then
+ -- begin of paragraph or box
+ elseif getid(p) ~= glyph_code or getsubtype(p) >= 256 or getfont(p) ~= font then
+ -- different font or language so quite certainly a different word
+ elseif not separator[getchar(p)] then
+ -- something that separates words
+ else
+ standalone = false
+ end
+ end
+ if standalone then
+ -- Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
+ current = analyze_next_chars_one(c,font,2)
+ syllableend = current
+ elseif consonant[getchar(current)] then
+ -- WHY current INSTEAD OF c ?
+
+ -- Consonant syllable: {C+[N]+<H+[<ZWNJ|ZWJ>]|<ZWNJ|ZWJ>+H>} + C+[N]+[A] + [< H+[<ZWNJ|ZWJ>] | {M}+[N]+[H]>]+[SM]+[(VD)]
+ current = analyze_next_chars_two(current,font) -- not c !
+ syllableend = current
+ end
+ end
+ end
+ if syllableend then
+ syllabe = syllabe + 1
+ local c = syllablestart
+ local n = getnext(syllableend)
+ while c ~= n do
+ setprop(c,a_syllabe,syllabe)
+ c = getnext(c)
+ end
+ end
+ if syllableend and syllablestart ~= syllableend then
+ head, current, nbspaces = dev2_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
+ end
+ if not syllableend and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font and not getprop(current,a_state) then
+ local mark = mark_four[getchar(current)]
+ if mark then
+ head, current = inject_syntax_error(head,current,mark)
+ end
+ end
+ start = false
+ current = getnext(current)
+ end
+
+ if nbspaces > 0 then
+ head = replace_all_nbsp(head)
+ end
+
+ head = tonode(head)
+
+ return head, done
+end
+
+methods.mlym = methods.deva
+methods.mlm2 = methods.dev2
diff --git a/tex/context/base/font-otc.lua b/tex/context/base/font-otc.lua
index db8587741..cc3a6a122 100644
--- a/tex/context/base/font-otc.lua
+++ b/tex/context/base/font-otc.lua
@@ -27,9 +27,17 @@ local setmetatableindex = table.setmetatableindex
-- the mkiv representation. And as the fontloader interface is modelled
-- after fontforge we cannot change that one too much either.
+local normalized = {
+ substitution = "substitution",
+ single = "substitution",
+ ligature = "ligature",
+ alternate = "alternate",
+ multiple = "multiple",
+}
+
local types = {
- substitution = "gsub_single",
single = "gsub_single",
+ substitution = "gsub_single",
ligature = "gsub_ligature",
alternate = "gsub_alternate",
multiple = "gsub_multiple",
@@ -43,140 +51,220 @@ local noflags = { }
local function addfeature(data,feature,specifications)
local descriptions = data.descriptions
local resources = data.resources
- local lookups = resources.lookups
- local gsubfeatures = resources.features.gsub
+ local features = resources.features
+ local sequences = resources.sequences
+ if not features or not sequences then
+ return
+ end
+ local gsubfeatures = features.gsub
if gsubfeatures and gsubfeatures[feature] then
- -- already present
- else
- local sequences = resources.sequences
- local fontfeatures = resources.features or everywhere
- local unicodes = resources.unicodes
- local lookuptypes = resources.lookuptypes
- local splitter = lpeg.splitter(" ",unicodes)
- local done = 0
- local skip = 0
- if not specifications[1] then
- -- so we accept a one entry specification
- specifications = { specifications }
+ return -- already present
+ end
+ local fontfeatures = resources.features or everywhere
+ local unicodes = resources.unicodes
+ local splitter = lpeg.splitter(" ",unicodes)
+ local done = 0
+ local skip = 0
+ if not specifications[1] then
+ -- so we accept a one entry specification
+ specifications = { specifications }
+ end
+
+ local function tounicode(code)
+ if not code then
+ return
+ elseif type(code) == "number" then
+ return code
+ else
+ return unicodes[code] or utfbyte(code)
end
- -- subtables are tables themselves but we also accept flattened singular subtables
- for s=1,#specifications do
- local specification = specifications[s]
- local valid = specification.valid
- if not valid or valid(data,specification,feature) then
- local initialize = specification.initialize
- if initialize then
- -- when false is returned we initialize only once
- specification.initialize = initialize(specification) and initialize or nil
- end
- local askedfeatures = specification.features or everywhere
- local subtables = specification.subtables or { specification.data } or { }
- local featuretype = types[specification.type or "substitution"]
- local featureflags = specification.flags or noflags
- local featureorder = specification.order or { feature }
- local added = false
- local featurename = format("ctx_%s_%s",feature,s)
- local st = { }
- for t=1,#subtables do
- local list = subtables[t]
- local full = format("%s_%s",featurename,t)
- st[t] = full
- if featuretype == "gsub_ligature" then
- lookuptypes[full] = "ligature"
- for code, ligature in next, list do
- local unicode = tonumber(code) or unicodes[code]
- local description = descriptions[unicode]
- if description then
- if type(ligature) == "string" then
- ligature = { lpegmatch(splitter,ligature) }
- end
- local present = true
- for i=1,#ligature do
- if not descriptions[ligature[i]] then
- present = false
- break
- end
- end
- if present then
- local slookups = description.slookups
- if slookups then
- slookups[full] = ligature
- else
- description.slookups = { [full] = ligature }
- end
- done, added = done + 1, true
+ end
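+
+    -- editorial sketch (not part of this commit): tounicode normalizes the three kinds of
+    -- keys and values a specification may contain; the glyph name "f_i" below is only an
+    -- assumed example of a name present in resources.unicodes
+    --
+    --   tounicode(0x0066)  -- 0x0066, numbers pass through untouched
+    --   tounicode("f_i")   -- unicodes["f_i"], a font specific slot (assumed to exist)
+    --   tounicode("a")     -- unicodes["a"] or, as a fallback, utfbyte("a") == 0x0061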
+
+ local coverup = otf.coverup
+ local coveractions = coverup.actions
+ local stepkey = coverup.stepkey
+ local register = coverup.register
+
+ for s=1,#specifications do
+ local specification = specifications[s]
+ local valid = specification.valid
+ if not valid or valid(data,specification,feature) then
+ local initialize = specification.initialize
+ if initialize then
+ -- when false is returned we initialize only once
+ specification.initialize = initialize(specification) and initialize or nil
+ end
+ local askedfeatures = specification.features or everywhere
+            local askedsteps = specification.steps or specification.subtables or { specification.data } or { }
+ local featuretype = normalized[specification.type or "substitution"] or "substitution"
+ local featureflags = specification.flags or noflags
+ local featureorder = specification.order or { feature }
+ local added = false
+ local nofsteps = 0
+ local steps = { }
+ for i=1,#askedsteps do
+ local list = askedsteps[i]
+ local coverage = { }
+ local cover = coveractions[featuretype]
+ if not cover then
+ -- unknown
+ elseif featuretype == "ligature" then
+ for code, ligature in next, list do
+ local unicode = tounicode(code)
+ local description = descriptions[unicode]
+ if description then
+ if type(ligature) == "string" then
+ ligature = { lpegmatch(splitter,ligature) }
+ end
+ local present = true
+ for i=1,#ligature do
+ local l = ligature[i]
+ local u = tounicode(l)
+ if descriptions[u] then
+ ligature[i] = u
else
- skip = skip + 1
+ present = false
+ break
end
end
+ if present then
+ cover(coverage,unicode,ligature)
+ done = done + 1
+ else
+ skip = skip + 1
+ end
+ else
+ skip = skip + 1
end
- elseif featuretype == "gsub_single" then
- lookuptypes[full] = "substitution"
- for code, replacement in next, list do
- local unicode = tonumber(code) or unicodes[code]
- local description = descriptions[unicode]
- if description then
- replacement = tonumber(replacement) or unicodes[replacement]
- if descriptions[replacement] then
- local slookups = description.slookups
- if slookups then
- slookups[full] = replacement
- else
- description.slookups = { [full] = replacement }
- end
- done, added = done + 1, true
- end
+ end
+ elseif featuretype == "substitution" then
+ for code, replacement in next, list do
+ local unicode = tounicode(code)
+ local description = descriptions[unicode]
+ if description then
+ if type(replacement) == "table" then
+ replacement = replacement[1]
+ end
+ replacement = tounicode(replacement)
+ if replacement and descriptions[replacement] then
+ cover(coverage,unicode,replacement)
+ done = done + 1
+ else
+ skip = skip + 1
end
+ else
+ skip = skip + 1
end
end
- end
- if added then
- -- script = { lang1, lang2, lang3 } or script = { lang1 = true, ... }
- for k, v in next, askedfeatures do
- if v[1] then
- askedfeatures[k] = table.tohash(v)
+ elseif featuretype == "alternate" then
+ for code, replacement in next, list do
+ local unicode = tounicode(code)
+ local description = descriptions[unicode]
+ if not description then
+ skip = skip + 1
+ elseif type(replacement) == "table" then
+ local r = { }
+ for i=1,#replacement do
+ local u = tounicode(replacement[i])
+ r[i] = descriptions[u] and u or unicode
+ end
+ cover(coverage,unicode,r)
+ done = done + 1
+ else
+ local u = tounicode(replacement)
+ if u then
+ cover(coverage,unicode,{ u })
+ done = done + 1
+ else
+ skip = skip + 1
+ end
end
end
- local sequence = {
- chain = 0,
- features = { [feature] = askedfeatures },
- flags = featureflags,
- name = featurename,
- order = featureorder,
- subtables = st,
- type = featuretype,
- }
- if specification.prepend then
- insert(sequences,1,sequence)
- else
- insert(sequences,sequence)
+ elseif featuretype == "multiple" then -- todo: unicode can be table
+ for code, replacement in next, list do
+ local unicode = tounicode(code)
+ local description = descriptions[unicode]
+ if not description then
+ skip = skip + 1
+ elseif type(replacement) == "table" then
+ local r, n = { }, 0
+ for i=1,#replacement do
+ local u = tounicode(replacement[i])
+ if descriptions[u] then
+ n = n + 1
+ r[n] = u
+ end
+ end
+ if n > 0 then
+ cover(coverage,unicode,r)
+ done = done + 1
+ else
+ skip = skip + 1
+ end
+ else
+ local u = tounicode(replacement)
+ if u then
+ cover(coverage,unicode,{ u })
+ done = done + 1
+ else
+ skip = skip + 1
+ end
+ end
end
- -- register in metadata (merge as there can be a few)
- if not gsubfeatures then
- gsubfeatures = { }
- fontfeatures.gsub = gsubfeatures
+ end
+ if next(coverage) then
+ added = true
+ nofsteps = nofsteps + 1
+ steps[nofsteps] = register(coverage,descriptions,resources,feature,featuretype,nofsteps)
+ end
+ end
+ if added then
+ -- script = { lang1, lang2, lang3 } or script = { lang1 = true, ... }
+ for k, v in next, askedfeatures do
+ if v[1] then
+ askedfeatures[k] = table.tohash(v)
end
- local k = gsubfeatures[feature]
- if not k then
- k = { }
- gsubfeatures[feature] = k
+ end
+ local sequence = {
+ chain = 0,
+ features = { [feature] = askedfeatures },
+ flags = featureflags,
+ name = feature, -- not needed
+ order = featureorder,
+ [stepkey] = steps,
+ nofsteps = nofsteps,
+ type = types[featuretype],
+ }
+ if specification.prepend then
+ insert(sequences,1,sequence)
+ else
+ insert(sequences,sequence)
+ end
+ -- register in metadata (merge as there can be a few)
+ if not gsubfeatures then
+ gsubfeatures = { }
+ fontfeatures.gsub = gsubfeatures
+ end
+ local k = gsubfeatures[feature]
+ if not k then
+ k = { }
+ gsubfeatures[feature] = k
+ end
+ for script, languages in next, askedfeatures do
+ local kk = k[script]
+ if not kk then
+ kk = { }
+ k[script] = kk
end
- for script, languages in next, askedfeatures do
- local kk = k[script]
- if not kk then
- kk = { }
- k[script] = kk
- end
- for language, value in next, languages do
- kk[language] = value
- end
+ for language, value in next, languages do
+ kk[language] = value
end
end
end
end
- if trace_loading then
- report_otf("registering feature %a, affected glyphs %a, skipped glyphs %a",feature,done,skip)
- end
+ end
+ if trace_loading then
+ report_otf("registering feature %a, affected glyphs %a, skipped glyphs %a",feature,done,skip)
end
end
@@ -220,7 +308,6 @@ local tlig_specification = {
type = "ligature",
features = everywhere,
data = tlig,
- name = "ctx_tlig",
order = { "tlig" },
flags = noflags,
prepend = true,
@@ -245,7 +332,6 @@ local trep_specification = {
type = "substitution",
features = everywhere,
data = trep,
- name = "ctx_trep",
order = { "trep" },
flags = noflags,
prepend = true,
@@ -376,3 +462,36 @@ registerotffeature {
-- name = 'hangulfix',
-- description = 'fixes for hangul',
-- }
+
+-- fonts.handlers.otf.addfeature {
+-- name = "stest",
+-- type = "substitution",
+-- data = {
+-- a = "X",
+-- b = "P",
+-- }
+-- }
+-- fonts.handlers.otf.addfeature {
+-- name = "atest",
+-- type = "alternate",
+-- data = {
+-- a = { "X", "Y" },
+-- b = { "P", "Q" },
+-- }
+-- }
+-- fonts.handlers.otf.addfeature {
+-- name = "mtest",
+-- type = "multiple",
+-- data = {
+-- a = { "X", "Y" },
+-- b = { "P", "Q" },
+-- }
+-- }
+-- fonts.handlers.otf.addfeature {
+-- name = "ltest",
+-- type = "ligature",
+-- data = {
+-- a = { "X", "Y" },
+-- b = { "P", "Q" },
+-- }
+-- }
diff --git a/tex/context/base/font-otd.lua b/tex/context/base/font-otd.lua
index 2dd23b741..ff1b471fc 100644
--- a/tex/context/base/font-otd.lua
+++ b/tex/context/base/font-otd.lua
@@ -249,23 +249,11 @@ function otf.dataset(tfmdata,font,attr) -- attr only when explicit (as in specia
}
rl[attr] = ra
local sequences = tfmdata.resources.sequences
- -- setmetatableindex(ra, function(t,k)
- -- if type(k) == "number" then
- -- local v = initialize(sequences[k],script,language,s_enabled,a_enabled,font,attr,dynamic)
- -- t[k] = v or false
- -- return v
- -- end
- -- end)
--- for s=1,#sequences do
--- local v = initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic)
--- if v then
--- ra[#ra+1] = v
--- end
--- end
- for s=1,#sequences do
- initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic,ra)
+ if sequences then
+ for s=1,#sequences do
+ initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic,ra)
+ end
end
--- table.save((jit and "tmc-" or "tma-")..font..".log",ra) -- bug in jit
end
return ra
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index edd9ed2de..423035fff 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -21,7 +21,7 @@ if not modules then modules = { } end modules ['font-otf'] = {
-- more checking against low level calls of functions
local utfbyte = utf.byte
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
+local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
local type, next, tonumber, tostring = type, next, tonumber, tostring
local abs = math.abs
local reversed, concat, insert, remove, sortedkeys = table.reversed, table.concat, table.insert, table.remove, table.sortedkeys
@@ -58,7 +58,7 @@ local otf = fonts.handlers.otf
otf.glists = { "gsub", "gpos" }
-otf.version = 2.816 -- beware: also sync font-mis.lua and in mtx-fonts
+otf.version = 2.817 -- beware: also sync font-mis.lua and in mtx-fonts
otf.cache = containers.define("fonts", "otf", otf.version, true)
local hashes = fonts.hashes
@@ -296,7 +296,7 @@ local ordered_enhancers = {
"expand lookups", -- a temp hack awaiting the lua loader
- "check extra features", -- after metadata and duplicates
+-- "check extra features", -- after metadata and duplicates
"cleanup tables",
@@ -600,6 +600,7 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone
applyruntimefixes(filename,data)
end
enhance("add dimensions",data,filename,nil,false)
+enhance("check extra features",data,filename)
if trace_sequences then
showfeatureorder(data,filename)
end
@@ -791,7 +792,7 @@ actions["prepare glyphs"] = function(data,filename,raw)
end
if not unicode or unicode == -1 then -- or unicode >= criterium then
if not name then
- name = format("u%06X.ctx",private)
+ name = formatters["u%06X.ctx"](private)
end
unicode = private
unicodes[name] = private
@@ -814,7 +815,7 @@ actions["prepare glyphs"] = function(data,filename,raw)
-- end
-- end
if not name then
- name = format("u%06X.ctx",unicode)
+ name = formatters["u%06X.ctx"](unicode)
end
unicodes[name] = unicode
nofunicodes = nofunicodes + 1
@@ -927,7 +928,7 @@ actions["prepare glyphs"] = function(data,filename,raw)
end
indices[index] = unicode
-- if not name then
- -- name = format("u%06X",unicode) -- u%06X.ctx
+ -- name = formatters["u%06X"](unicode) -- u%06X.ctx
-- end
descriptions[unicode] = {
-- width = glyph.width,
@@ -1100,7 +1101,7 @@ actions["add duplicates"] = function(data,filename,raw)
end
if u > 0 then -- and
local duplicate = table.copy(description) -- else packing problem
- duplicate.comment = format("copy of U+%05X", unicode)
+ duplicate.comment = formatters["copy of %U"](unicode)
descriptions[u] = duplicate
-- validduplicates[#validduplicates+1] = u
if trace_loading then
@@ -2897,3 +2898,33 @@ function otf.scriptandlanguage(tfmdata,attr)
local properties = tfmdata.properties
return properties.script or "dflt", properties.language or "dflt"
end
+
+-- a little bit of abstraction
+
+local function justset(coverage,unicode,replacement)
+ coverage[unicode] = replacement
+end
+
+otf.coverup = {
+ stepkey = "subtables",
+ actions = {
+ substitution = justset,
+ alternate = justset,
+ multiple = justset,
+ ligature = justset,
+ },
+ register = function(coverage,descriptions,resources,feature,lookuptype,n)
+ local name = formatters["ctx_%s_%s"](feature,n)
+ resources.lookuptypes[name] = lookuptype
+ for u, c in next, coverage do
+ local description = descriptions[u]
+ local slookups = description.slookups
+ if slookups then
+ slookups[name] = c
+ else
+ description.slookups = { [name] = c }
+ end
+ end
+ return name
+ end
+}
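+
+-- A minimal usage sketch (editorial note, not part of this commit), mirroring how
+-- font-otc.lua consumes this abstraction; 'descriptions', 'resources' and the
+-- 'sequence' table are assumed to come from an already loaded font:
+--
+-- local coverup  = otf.coverup
+-- local cover    = coverup.actions["substitution"]
+-- local coverage = { }
+-- cover(coverage,0x0061,0x0058) -- replace U+0061 (a) by U+0058 (X)
+-- local step = coverup.register(coverage,descriptions,resources,"test","substitution",1)
+-- sequence[coverup.stepkey] = { step } -- here: sequence.subtables = { "ctx_test_1" }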
diff --git a/tex/context/base/font-otl.lua b/tex/context/base/font-otl.lua
index 58cce837c..bd76fced6 100644
--- a/tex/context/base/font-otl.lua
+++ b/tex/context/base/font-otl.lua
@@ -15,11 +15,692 @@ if not modules then modules = { } end modules ['font-otl'] = {
-- lookups as well as the specification. Keeping the lookup data in the glyphs is
-- very instructive and handy for tracing. On the other hand hashing is what brings
 -- speed. So, in the new approach (the old one will stay around too) we no
--- longer keep data in the glyphs which saves us (what in retrospect looks a bit
+-- longer keep data in the glyphs which saves us a (what in retrospect looks a bit
-- like) a reconstruction step. It also means that the data format of the cached
-- files changes. What method is used depends on that format. There is no fundamental
 -- change in processing, and not even in data organization. Most has to do with
-- loading and storage.
--- This file is mostly used for experiments (on my machine) before they make it into
--- the core.
+-- todo: less tounicodes
+
+local gmatch, find, match, lower, strip = string.gmatch, string.find, string.match, string.lower, string.strip
+local type, next, tonumber, tostring = type, next, tonumber, tostring
+local abs = math.abs
+local ioflush = io.flush
+local derivetable = table.derive
+local formatters = string.formatters
+
+local setmetatableindex = table.setmetatableindex
+local allocate = utilities.storage.allocate
+local registertracker = trackers.register
+local registerdirective = directives.register
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local elapsedtime = statistics.elapsedtime
+local findbinfile = resolvers.findbinfile
+
+----- trace_private = false registertracker("otf.private", function(v) trace_private = v end)
+----- trace_subfonts = false registertracker("otf.subfonts", function(v) trace_subfonts = v end)
+local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end)
+local trace_features = false registertracker("otf.features", function(v) trace_features = v end)
+----- trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end)
+----- trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end)
+----- trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end)
+local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end)
+
+local report_otf = logs.reporter("fonts","otf loading")
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+otf.version = 3.000 -- beware: also sync font-mis.lua and in mtx-fonts
+otf.cache = containers.define("fonts", "otl", otf.version, true)
+
+local otfreaders = otf.readers
+
+local hashes = fonts.hashes
+local definers = fonts.definers
+local readers = fonts.readers
+local constructors = fonts.constructors
+
+local otffeatures = constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local enhancers = allocate()
+otf.enhancers = enhancers
+local patches = { }
+enhancers.patches = patches
+
+local forceload = false
+local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M)
+local syncspace = true
+local forcenotdef = false
+
+local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes
+
+local wildcard = "*"
+local default = "dflt"
+
+local formats = fonts.formats
+
+formats.otf = "opentype"
+formats.ttf = "truetype"
+formats.ttc = "truetype"
+
+registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end)
+registerdirective("fonts.otf.loader.force", function(v) forceload = v end)
+registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end)
+registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end)
+
+-- local function load_featurefile(raw,featurefile)
+-- if featurefile and featurefile ~= "" then
+-- if trace_loading then
+-- report_otf("using featurefile %a", featurefile)
+-- end
+-- -- TODO: apply_featurefile(raw, featurefile)
+-- end
+-- end
+
+local ordered_enhancers = {
+ "check extra features",
+}
+
+local actions = allocate()
+local before = allocate()
+local after = allocate()
+
+patches.before = before
+patches.after = after
+
+local function enhance(name,data,filename,raw)
+ local enhancer = actions[name]
+ if enhancer then
+ if trace_loading then
+ report_otf("apply enhancement %a to file %a",name,filename)
+ ioflush()
+ end
+ enhancer(data,filename,raw)
+ else
+ -- no message as we can have private ones
+ end
+end
+
+function enhancers.apply(data,filename,raw)
+ local basename = file.basename(lower(filename))
+ if trace_loading then
+ report_otf("%s enhancing file %a","start",filename)
+ end
+ ioflush() -- we want instant messages
+ for e=1,#ordered_enhancers do
+ local enhancer = ordered_enhancers[e]
+ local b = before[enhancer]
+ if b then
+ for pattern, action in next, b do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ enhance(enhancer,data,filename,raw)
+ local a = after[enhancer]
+ if a then
+ for pattern, action in next, a do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ ioflush() -- we want instant messages
+ end
+ if trace_loading then
+ report_otf("%s enhancing file %a","stop",filename)
+ end
+ ioflush() -- we want instant messages
+end
+
+-- patches.register("before","migrate metadata","cambria",function() end)
+
+function patches.register(what,where,pattern,action)
+ local pw = patches[what]
+ if pw then
+ local ww = pw[where]
+ if ww then
+ ww[pattern] = action
+ else
+ pw[where] = { [pattern] = action}
+ end
+ end
+end
+
+function patches.report(fmt,...)
+ if trace_loading then
+ report_otf("patching: %s",formatters[fmt](...))
+ end
+end
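+
+-- editorial sketch (not part of this commit): registering a patch against the single
+-- enhancer listed above; the "somefont" pattern and the action body are made up and
+-- only meant to illustrate the call signature
+--
+-- patches.register("after","check extra features","somefont",
+--     function(data,filename,raw)
+--         patches.report("tweaking %s",filename)
+--     end
+-- )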
+
+function enhancers.register(what,action) -- only already registered can be overloaded
+ actions[what] = action
+end
+
+function otf.load(filename,sub,featurefile) -- second argument (format) is gone !
+ local base = file.basename(file.removesuffix(filename))
+ local name = file.removesuffix(base)
+ local attr = lfs.attributes(filename)
+ local size = attr and attr.size or 0
+ local time = attr and attr.modification or 0
+ if featurefile then
+ name = name .. "@" .. file.removesuffix(file.basename(featurefile))
+ end
+ -- or: sub = tonumber(sub)
+ if sub == "" then
+ sub = false
+ end
+ local hash = name
+ if sub then
+ hash = hash .. "-" .. sub
+ end
+ hash = containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles = { }
+ for s in gmatch(featurefile,"[^,]+") do
+ local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name == "" then
+ report_otf("loading error, no featurefile %a",s)
+ else
+ local attr = lfs.attributes(name)
+ featurefiles[#featurefiles+1] = {
+ name = name,
+ size = attr and attr.size or 0,
+ time = attr and attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles == 0 then
+ featurefiles = nil
+ end
+ end
+ local data = containers.read(otf.cache,hash)
+ local reload = not data or data.size ~= size or data.time ~= time or data.tableversion ~= otfreaders.tableversion
+ if forceload then
+ report_otf("forced reload of %a due to hard coded flag",filename)
+ reload = true
+ end
+ if not reload then
+ local featuredata = data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata ~= #featurefiles then
+ reload = true
+ else
+ for i=1,#featurefiles do
+ local fi, fd = featurefiles[i], featuredata[i]
+ if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then
+ reload = true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload = true
+ end
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
+ end
+ end
+ if reload then
+ report_otf("loading %a, hash %a",filename,hash)
+ --
+ starttiming(otfreaders)
+ data = otfreaders.loadfont(filename,sub)
+ --
+ -- if featurefiles then
+ -- for i=1,#featurefiles do
+ -- load_featurefile(data,featurefiles[i].name)
+ -- end
+ -- end
+ --
+ --
+ if data then
+ otfreaders.compact(data)
+ otfreaders.rehash(data,"unicodes")
+ otfreaders.addunicodetable(data)
+ otfreaders.pack(data)
+ report_otf("loading done")
+ report_otf("saving %a in cache",filename)
+ data = containers.write(otf.cache, hash, data)
+ if cleanup > 1 then
+ collectgarbage("collect")
+ end
+ stoptiming(otfreaders)
+ if elapsedtime then -- not in generic
+ report_otf("loading, optimizing, packing and caching time %s", elapsedtime(otfreaders))
+ end
+ if cleanup > 3 then
+ collectgarbage("collect")
+ end
+            data = containers.read(otf.cache,hash) -- this frees the old table and loads the sparse one
+ if cleanup > 2 then
+ collectgarbage("collect")
+ end
+ else
+ data = nil
+ report_otf("loading failed due to read error")
+ end
+ end
+ if data then
+ if trace_defining then
+ report_otf("loading from cache using hash %a",hash)
+ end
+ --
+ otfreaders.unpack(data)
+ otfreaders.expand(data) -- inline tables
+ otfreaders.addunicodetable(data) -- only when not done yet
+ --
+ enhancers.apply(data,filename,data)
+ --
+ constructors.addcoreunicodes(unicodes)
+ --
+ if applyruntimefixes then
+ applyruntimefixes(filename,data)
+ end
+ --
+ data.metadata.math = data.resources.mathconstants
+ end
+
+
+ return data
+end
+
+-- modes: node, base, none
+
+function otf.setfeatures(tfmdata,features)
+ local okay = constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
+ if okay then
+ return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
+ else
+ return { } -- will become false
+ end
+end
+
+-- the first version made a top/mid/bot extensible table, now we just
+-- pass on the variants data and deal with it in the tfm scaler (there
+-- is no longer an extensible table anyway)
+--
+-- we cannot share descriptions as virtual fonts might extend them (ok,
+-- we could use a cache with a hash)
+--
+-- we already assign an empty table to characters as we can add for
+-- instance protruding info and loop over characters; one is not supposed
+-- to change descriptions and if one does so one should make a copy!
+
+local function copytotfm(data,cache_id)
+ if data then
+ local metadata = data.metadata
+ local resources = data.resources
+ local properties = derivetable(data.properties)
+ local descriptions = derivetable(data.descriptions)
+ local goodies = derivetable(data.goodies)
+ local characters = { }
+ local parameters = { }
+ local mathparameters = { }
+ --
+ local resources = data.resources
+ local unicodes = resources.unicodes
+ local spaceunits = 500
+ local spacer = "space"
+ local designsize = metadata.designsize or 100
+ local minsize = metadata.minsize or designsize
+ local maxsize = metadata.maxsize or designsize
+ local mathspecs = metadata.math
+ --
+ if designsize == 0 then
+ designsize = 100
+ minsize = 100
+ maxsize = 100
+ end
+ if mathspecs then
+ for name, value in next, mathspecs do
+ mathparameters[name] = value
+ end
+ end
+ for unicode in next, data.descriptions do -- use parent table
+ characters[unicode] = { }
+ end
+ if mathspecs then
+ for unicode, character in next, characters do
+ local d = descriptions[unicode]
+ local m = d.math
+ if m then
+ -- watch out: luatex uses horiz_variants for the parts
+ local variants = m.hvariants
+ local parts = m.hparts
+ if variants then
+ local c = character
+ for i=1,#variants do
+ -- local un = variants[i].glyph
+ local un = variants[i]
+ c.next = un
+ c = characters[un]
+ end -- c is now last in chain
+ c.horiz_variants = parts
+ elseif parts then
+ character.horiz_variants = parts
+ end
+ local variants = m.vvariants
+ local parts = m.vparts
+ if variants then
+ local c = character
+ for i=1,#variants do
+ -- local un = variants[i].glyph
+ local un = variants[i]
+ c.next = un
+ c = characters[un]
+ end -- c is now last in chain
+ c.vert_variants = parts
+ elseif parts then
+ character.vert_variants = parts
+ end
+ local italic_correction = m.italic -- vitalic ?
+ if italic_correction then
+ character.vert_italic_correction = italic_correction -- was c.
+ end
+ local top_accent = m.accent -- taccent?
+ if top_accent then
+ character.top_accent = top_accent
+ end
+ local kerns = m.kerns
+ if kerns then
+ character.mathkerns = kerns
+ end
+ end
+ end
+ end
+ -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't
+ -- we use the basename then?)
+ local filename = constructors.checkedfilename(resources)
+ local fontname = metadata.fontname
+ local fullname = metadata.fullname or fontname
+ local psname = fontname or fullname
+ local units = metadata.units or 1000
+ --
+ if units == 0 then -- catch bugs in fonts
+ units = 1000 -- maybe 2000 when ttf
+ metadata.units = 1000
+ report_otf("changing %a units to %a",0,units)
+ end
+ --
+ local monospaced = metadata.monospaced
+ local charwidth = metadata.averagewidth -- or unset
+ local charxheight = metadata.xheight -- or unset
+ local italicangle = metadata.italicangle
+ properties.monospaced = monospaced
+ parameters.italicangle = italicangle
+ parameters.charwidth = charwidth
+ parameters.charxheight = charxheight
+ --
+ local space = 0x0020
+ local emdash = 0x2014
+ if monospaced then
+ if descriptions[space] then
+ spaceunits, spacer = descriptions[space].width, "space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits, spacer = descriptions[emdash].width, "emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits, spacer = charwidth, "charwidth"
+ end
+ else
+ if descriptions[space] then
+ spaceunits, spacer = descriptions[space].width, "space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits, spacer = descriptions[emdash].width/2, "emdash/2"
+ end
+ if not spaceunits and charwidth then
+ spaceunits, spacer = charwidth, "charwidth"
+ end
+ end
+ spaceunits = tonumber(spaceunits) or 500 -- brrr
+ --
+ parameters.slant = 0
+ parameters.space = spaceunits -- 3.333 (cmr10)
+ parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10)
+ parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10)
+ parameters.x_height = 2*units/5 -- 400
+ parameters.quad = units -- 1000
+ if spaceunits < 2*units/5 then
+ -- todo: warning
+ end
+ if italicangle and italicangle ~= 0 then
+ parameters.italicangle = italicangle
+ parameters.italicfactor = math.cos(math.rad(90+italicangle))
+ parameters.slant = - math.tan(italicangle*math.pi/180)
+ end
+ if monospaced then
+ parameters.space_stretch = 0
+ parameters.space_shrink = 0
+ elseif syncspace then --
+ parameters.space_stretch = spaceunits/2
+ parameters.space_shrink = spaceunits/3
+ end
+ parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10)
+ if charxheight then
+ parameters.x_height = charxheight
+ else
+ local x = 0x0078
+ if x then
+ local x = descriptions[x]
+ if x then
+ parameters.x_height = x.height
+ end
+ end
+ end
+ --
+ parameters.designsize = (designsize/10)*65536
+ parameters.minsize = (minsize /10)*65536
+ parameters.maxsize = (maxsize /10)*65536
+ parameters.ascender = abs(metadata.ascender or 0)
+ parameters.descender = abs(metadata.descender or 0)
+ parameters.units = units
+ --
+ properties.space = spacer
+ properties.encodingbytes = 2
+ properties.format = data.format or formats.otf
+ properties.noglyphnames = true
+ properties.filename = filename
+ properties.fontname = fontname
+ properties.fullname = fullname
+ properties.psname = psname
+ properties.name = filename or fullname
+ --
+ -- properties.name = specification.name
+ -- properties.sub = specification.sub
+ --
+ return {
+ characters = characters,
+ descriptions = descriptions,
+ parameters = parameters,
+ mathparameters = mathparameters,
+ resources = resources,
+ properties = properties,
+ goodies = goodies,
+ }
+ end
+end
+
+local function otftotfm(specification)
+ local cache_id = specification.hash
+ local tfmdata = containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local name = specification.name
+ local sub = specification.sub
+ local filename = specification.filename
+ local features = specification.features.normal
+ local rawdata = otf.load(filename,sub,features and features.featurefile)
+ if rawdata and next(rawdata) then
+ local descriptions = rawdata.descriptions
+ rawdata.lookuphash = { } -- to be done
+ tfmdata = copytotfm(rawdata,cache_id)
+ if tfmdata and next(tfmdata) then
+ -- at this moment no characters are assigned yet, only empty slots
+ local features = constructors.checkedfeatures("otf",features)
+ local shared = tfmdata.shared
+ if not shared then
+ shared = { }
+ tfmdata.shared = shared
+ end
+ shared.rawdata = rawdata
+ -- shared.features = features -- default
+ shared.dynamics = { }
+ -- shared.processes = { }
+ tfmdata.changed = { }
+ shared.features = features
+ shared.processes = otf.setfeatures(tfmdata,features)
+ end
+ end
+ containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+end
+
+local function read_from_otf(specification)
+ local tfmdata = otftotfm(specification)
+ if tfmdata then
+ -- this late ? .. needs checking
+ tfmdata.properties.name = specification.name
+ tfmdata.properties.sub = specification.sub
+ --
+ tfmdata = constructors.scale(tfmdata,specification)
+ local allfeatures = tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
+ constructors.setname(tfmdata,specification) -- only otf?
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
+ end
+ return tfmdata
+end
+
+local function checkmathsize(tfmdata,mathsize)
+ local mathdata = tfmdata.shared.rawdata.metadata.math
+ local mathsize = tonumber(mathsize)
+ if mathdata then -- we cannot use mathparameters as luatex will complain
+ local parameters = tfmdata.parameters
+ parameters.scriptpercentage = mathdata.ScriptPercentScaleDown
+ parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown
+ parameters.mathsize = mathsize
+ end
+end
+
+registerotffeature {
+ name = "mathsize",
+ description = "apply mathsize specified in the font",
+ initializers = {
+ base = checkmathsize,
+ node = checkmathsize,
+ }
+}
+
+-- readers
+
+function otf.collectlookups(rawdata,kind,script,language)
+ local sequences = rawdata.resources.sequences
+ if sequences then
+ local featuremap = { }
+ local featurelist = { }
+ for s=1,#sequences do
+ local sequence = sequences[s]
+ local features = sequence.features
+ if features then
+ features = features[kind]
+ if features then
+ features = features[script] or features[default] or features[wildcard]
+ if features then
+ features = features[language] or features[default] or features[wildcard]
+ if features then
+ if not featuremap[sequence] then
+ featuremap[sequence] = true
+ featurelist[#featurelist+1] = sequence
+ end
+ end
+ end
+ end
+ end
+ end
+ if #featurelist > 0 then
+ return featuremap, featurelist
+ end
+ end
+end
+
+local function check_otf(forced,specification,suffix)
+ local name = specification.name
+ if forced then
+ name = specification.forcedname -- messy
+ end
+ local fullname = findbinfile(name,suffix) or ""
+ if fullname == "" then
+ fullname = fonts.names.getfilename(name,suffix) or ""
+ end
+ if fullname ~= "" and not fonts.names.ignoredfile(fullname) then
+ specification.filename = fullname
+ return read_from_otf(specification)
+ end
+end
+
+local function opentypereader(specification,suffix)
+ local forced = specification.forced or ""
+ if formats[forced] then
+ return check_otf(true,specification,forced)
+ else
+ return check_otf(false,specification,suffix)
+ end
+end
+
+readers.opentype = opentypereader -- kind of useless and obsolete
+
+function readers.otf (specification) return opentypereader(specification,"otf") end
+function readers.ttf (specification) return opentypereader(specification,"ttf") end
+function readers.ttc (specification) return opentypereader(specification,"ttf") end
+
+-- this will be overloaded
+
+function otf.scriptandlanguage(tfmdata,attr)
+ local properties = tfmdata.properties
+ return properties.script or "dflt", properties.language or "dflt"
+end
+
+-- a little bit of abstraction
+
+local function justset(coverage,unicode,replacement)
+ coverage[unicode] = replacement
+end
+
+otf.coverup = {
+ stepkey = "steps",
+ actions = {
+ substitution = justset,
+ alternate = justset,
+ multiple = justset,
+ ligature = function (coverage,unicode,ligature)
+ local first = ligature[1]
+ local tree = coverage[first]
+ if not tree then
+ tree = { }
+ coverage[first] = tree
+ end
+ for i=2,#ligature do
+ local l = ligature[i]
+ local t = tree[l]
+ if not t then
+ t = { }
+ tree[l] = t
+ end
+ tree = t
+ end
+ tree.ligature = unicode
+ end,
+ },
+ register = function(coverage)
+ return { coverage = coverage }
+ end
+}
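+
+-- A minimal sketch (an illustration only, not used by the loader) of how the
+-- actions above fill a coverage table; the unicodes are just examples:
+--
+-- local coverage = { }
+-- local actions = otf.coverup.actions
+-- actions.ligature(coverage,0xFB00,{ 0x66, 0x66 })        -- f + f     -> ff
+-- actions.ligature(coverage,0xFB03,{ 0x66, 0x66, 0x69 })  -- f + f + i -> ffi
+--
+-- -- coverage[0x66] is now { [0x66] = { ligature = 0xFB00, [0x69] = { ligature = 0xFB03 } } }
+--
+-- local step = otf.coverup.register(coverage)             -- { coverage = coverage }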
diff --git a/tex/context/base/font-oto.lua b/tex/context/base/font-oto.lua
new file mode 100644
index 000000000..08e2fe3cf
--- /dev/null
+++ b/tex/context/base/font-oto.lua
@@ -0,0 +1,452 @@
+if not modules then modules = { } end modules ['font-oto'] = { -- original tex
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is a version of font-otb adapted to the new fontloader code. We used to have two
+-- base initialization methods but now we have only one. This means that instead of the
+-- old default (independent) we now use the one more similar to node mode (shared).
+
+local concat, unpack = table.concat, table.unpack
+local insert, remove = table.insert, table.remove
+local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
+local type, next, tonumber, tostring, rawget = type, next, tonumber, tostring, rawget
+local lpegmatch = lpeg.match
+local utfchar = utf.char
+
+local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
+local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
+local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
+local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
+local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
+local trace_ligatures_detail = false trackers.register("otf.ligatures.detail", function(v) trace_ligatures_detail = v end)
+local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
+local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
+
+local report_prepare = logs.reporter("fonts","otf prepare")
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+local otffeatures = otf.features
+local registerotffeature = otffeatures.register
+
+otf.defaultbasealternate = "none" -- first last
+
+local wildcard = "*"
+local default = "dflt"
+
+local formatters = string.formatters
+local f_unicode = formatters["%U"]
+local f_uniname = formatters["%U (%s)"]
+local f_unilist = formatters["% t (% t)"]
+
+local function gref(descriptions,n)
+ if type(n) == "number" then
+ local name = descriptions[n].name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num, nam, j = { }, { }, 0
+ for i=1,#n do
+ local ni = n[i]
+ if tonumber(ni) then -- first is likely a key
+ j = j + 1
+ local di = descriptions[ni]
+ num[j] = f_unicode(ni)
+ nam[j] = di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in base mode tracing>"
+ end
+end
+
+local function cref(feature,sequence)
+ return formatters["feature %a, type %a, chain lookup %a"](feature,sequence.type,sequence.name)
+end
+
+
+local function report_alternate(feature,sequence,descriptions,unicode,replacement,value,comment)
+ report_prepare("%s: base alternate %s => %s (%S => %S)",
+ cref(feature,sequence),
+ gref(descriptions,unicode),
+ replacement and gref(descriptions,replacement),
+ value,
+ comment)
+end
+
+local function report_substitution(feature,sequence,descriptions,unicode,substitution)
+ report_prepare("%s: base substitution %s => %S",
+ cref(feature,sequence),
+ gref(descriptions,unicode),
+ gref(descriptions,substitution))
+end
+
+local function report_ligature(feature,sequence,descriptions,unicode,ligature)
+ report_prepare("%s: base ligature %s => %S",
+ cref(feature,sequence),
+ gref(descriptions,ligature),
+ gref(descriptions,unicode))
+end
+
+local function report_kern(feature,sequence,descriptions,unicode,otherunicode,value)
+ report_prepare("%s: base kern %s + %s => %S",
+ cref(feature,sequence),
+ gref(descriptions,unicode),
+ gref(descriptions,otherunicode),
+ value)
+end
+
+-- We need to make sure that luatex sees the difference between base fonts that have
+-- different glyphs in the same slots in fonts that have the same fullname (or filename).
+-- LuaTeX will merge fonts eventually (and subset later on). If needed we can use a more
+-- verbose name as long as we don't use <()<>[]{}/%> and the length is < 128.
+
+local basehash, basehashes, applied = { }, 1, { }
+
+local function registerbasehash(tfmdata)
+ local properties = tfmdata.properties
+ local hash = concat(applied," ")
+ local base = basehash[hash]
+ if not base then
+ basehashes = basehashes + 1
+ base = basehashes
+ basehash[hash] = base
+ end
+ properties.basehash = base
+ properties.fullname = properties.fullname .. "-" .. base
+ -- report_prepare("fullname base hash '%a, featureset %a",tfmdata.properties.fullname,hash)
+ applied = { }
+end
+
+local function registerbasefeature(feature,value)
+ applied[#applied+1] = feature .. "=" .. tostring(value)
+end
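+
+-- An illustrative (made up) sequence of calls, not actual loader code:
+--
+-- registerbasefeature("liga",true)
+-- registerbasefeature("kern",true)
+-- registerbasehash(tfmdata)
+--
+-- applied becomes { "liga=true", "kern=true" }, the string "liga=true kern=true"
+-- is mapped to a small number, and that number is appended to properties.fullname
+-- so that luatex sees base fonts with different feature sets as different fonts.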
+
+-- The original basemode ligature builder used the names of components and did some expression
+-- juggling to get the chain right. The current variant starts with unicodes but still uses
+-- names to make the chain. This is needed because we have to create intermediates when needed
+-- but use predefined snippets when available. To some extent the current builder is more stupid
+-- but I don't worry that much about it as ligatures are rather predictable.
+--
+-- Personally I think that an ff + i == ffi rule as used in for instance latin modern is pretty
+-- weird as no sane person will key that in and expect a glyph for that ligature plus the following
+-- character. Anyhow, as we need to deal with this, we do, but no guarantees are given.
+--
+--          latin modern      dejavu
+--
+-- f+f      102 102           102 102
+-- f+i      102 105           102 105
+-- f+l      102 108           102 108
+-- f+f+i                      102 102 105
+-- f+f+l    102 102 108       102 102 108
+-- ff+i     64256 105         64256 105
+-- ff+l                       64256 108
+--
+-- As you can see here, latin modern is less complete than dejavu but
+-- in practice one will not notice it.
+--
+-- The while loop is needed because we need to resolve for instance pseudo names like
+-- hyphen_hyphen to endash so in practice we end up with a bit too many definitions but the
+-- overhead is negligible. We can have changed[first] or changed[second] but it quickly becomes
+-- messy if we need to take that into account.
+
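+-- To make the code below easier to follow: a gsub_ligature coverage entry is a
+-- tree indexed by the unicodes of the following components, with the resulting
+-- glyph in a "ligature" field. A hypothetical f/ff/ffi setup would look like:
+--
+-- coverage[0x66] = {              -- f
+--     [0x66] = {                  -- f f
+--         ligature = 0xFB00,      -- ff
+--         [0x69] = {              -- f f i
+--             ligature = 0xFB03,  -- ffi
+--         },
+--     },
+-- }
+--
+-- The functions below walk such trees and turn them into tfm style per character
+-- ligature entries (characters[first].ligatures[next] = { char = result }), adding
+-- fake intermediate glyphs when a needed combination has no glyph of its own.
+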
+local function makefake(tfmdata,name,present)
+ local resources = tfmdata.resources
+ local private = resources.private
+ local character = { intermediate = true, ligatures = { } }
+ resources.unicodes[name] = private
+ tfmdata.characters[private] = character
+ tfmdata.descriptions[private] = { name = name }
+ resources.private = private + 1
+ present[name] = private
+ return character
+end
+
+local function make_1(present,tree,name)
+ for k, v in next, tree do
+ if k == "ligature" then
+ present[name] = v
+ else
+ make_1(present,v,name .. "_" .. k)
+ end
+ end
+end
+
+local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,sequence)
+ for k, v in next, tree do
+ if k == "ligature" then
+ local character = characters[preceding]
+ if not character then
+ if trace_baseinit then
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",sequence.name,v,preceding)
+ end
+ character = makefake(tfmdata,name,present)
+ end
+ local ligatures = character.ligatures
+ if ligatures then
+ ligatures[unicode] = { char = v }
+ else
+ character.ligatures = { [unicode] = { char = v } }
+ end
+ if done then
+ local d = done[sequence.name]
+ if not d then
+ done[sequence.name] = { "dummy", v }
+ else
+ d[#d+1] = v
+ end
+ end
+ else
+ local code = present[name] or unicode
+ local name = name .. "_" .. k
+ make_2(present,tfmdata,characters,v,name,code,k,done,sequence)
+ end
+ end
+end
+
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local resources = tfmdata.resources
+ local changed = tfmdata.changed
+
+ local ligatures = { }
+ local alternate = tonumber(value) or true and 1
+ local defaultalt = otf.defaultbasealternate
+
+ local trace_singles = trace_baseinit and trace_singles
+ local trace_alternatives = trace_baseinit and trace_alternatives
+ local trace_ligatures = trace_baseinit and trace_ligatures
+
+ for i=1,#lookuplist do
+ local sequence = lookuplist[i]
+ local steps = sequence.steps
+ local kind = sequence.type
+ if kind == "gsub_single" then
+ for i=1,#steps do
+ for unicode, data in next, steps[i].coverage do
+ if not changed[unicode] then
+ if trace_singles then
+ report_substitution(feature,sequence,descriptions,unicode,data)
+ end
+ changed[unicode] = data
+ end
+ end
+ end
+ elseif kind == "gsub_alternate" then
+ for i=1,#steps do
+ for unicode, data in next, steps[i].coverage do
+ if not changed[unicode] then
+ local replacement = data[alternate]
+ if replacement then
+ changed[unicode] = replacement
+ if trace_alternatives then
+ report_alternate(feature,sequence,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt == "first" then
+ replacement = data[1]
+ changed[unicode] = replacement
+ if trace_alternatives then
+ report_alternate(feature,sequence,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt == "last" then
+ replacement = data[#data]
+ changed[unicode] = replacement
+ if trace_alternatives then
+ report_alternate(feature,sequence,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,sequence,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ end
+ end
+ end
+ elseif kind == "gsub_ligature" then
+-- inspect(steps)
+ for i=1,#steps do
+ for unicode, data in next, steps[i].coverage do
+ ligatures[#ligatures+1] = { unicode, data, sequence }
+ if trace_ligatures then
+ report_ligature(feature,sequence,descriptions,unicode,data)
+ end
+ end
+ end
+ end
+ end
+
+ local nofligatures = #ligatures
+
+ if nofligatures > 0 then
+
+ local characters = tfmdata.characters
+ local present = { }
+ local done = trace_baseinit and trace_ligatures and { }
+
+ for i=1,nofligatures do
+ local ligature = ligatures[i]
+ local unicode, tree = ligature[1], ligature[2]
+ make_1(present,tree,"ctx_"..unicode)
+ end
+
+ for i=1,nofligatures do
+ local ligature = ligatures[i]
+ local unicode, tree, sequence = ligature[1], ligature[2], ligature[3]
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,sequence)
+ end
+
+ end
+
+end
+
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local resources = tfmdata.resources
+ local properties = tfmdata.properties
+ local traceindeed = trace_baseinit and trace_kerns
+ -- check out this sharedkerns trickery
+ for i=1,#lookuplist do
+ local sequence = lookuplist[i]
+ local steps = sequence.steps
+ local kind = sequence.type
+ local format = sequence.format
+ if kind == "gpos_pair" then
+ for i=1,#steps do
+ local step = steps[i]
+ if step.format == "kern" then
+ for unicode, data in next, steps[i].coverage do
+ local character = characters[unicode]
+ local kerns = character.kerns
+ if not kerns then
+ kerns = { }
+ character.kerns = kerns
+ end
+ if traceindeed then
+ for otherunicode, kern in next, data do
+ if not kerns[otherunicode] and kern ~= 0 then
+ kerns[otherunicode] = kern
+ report_kern(feature,sequence,descriptions,unicode,otherunicode,kern)
+ end
+ end
+ else
+ for otherunicode, kern in next, data do
+ if not kerns[otherunicode] and kern ~= 0 then
+ kerns[otherunicode] = kern
+ end
+ end
+ end
+ end
+ else
+ -- normally we don't end up here (yet untested)
+ for unicode, data in next, steps[i].coverage do
+ local character = characters[unicode]
+ local kerns = character.kerns
+ for otherunicode, kern in next, data do
+ if not kern[2] and not (kerns and kerns[otherunicode]) then
+ local kern = kern[1]
+ if kern[1] ~= 0 or kern[2] ~= 0 or kern[4] ~= 0 then
+ kern = kern[3]
+ if kern ~= 0 then
+ if kerns then
+ kerns[otherunicode] = kern
+ else
+ kerns = { [otherunicode] = kern }
+ character.kerns = kerns
+ end
+ if traceindeed then
+ report_kern(feature,sequence,descriptions,unicode,otherunicode,kern)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+end
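+
+-- The net effect of the kern handling above, in base mode, is a plain tfm style
+-- kern table per character; an illustrative (made up) result:
+--
+-- characters[0x41].kerns = { [0x56] = -80, [0x54] = -60 } -- A kerned with V and T
+--
+-- where the values are still in font units at this stage.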
+
+local function initializehashes(tfmdata)
+ -- already done
+end
+
+local function featuresinitializer(tfmdata,value)
+ if true then -- value then
+ local starttime = trace_preparing and os.clock()
+ local features = tfmdata.shared.features
+ local fullname = tfmdata.properties.fullname or "?"
+ if features then
+ initializehashes(tfmdata)
+ local collectlookups = otf.collectlookups
+ local rawdata = tfmdata.shared.rawdata
+ local properties = tfmdata.properties
+ local script = properties.script
+ local language = properties.language
+ local rawfeatures = rawdata.resources.features
+ local basesubstitutions = rawfeatures and rawfeatures.gsub
+ local basepositionings = rawfeatures and rawfeatures.gpos
+ --
+ if basesubstitutions or basepositionings then
+ local sequences = tfmdata.resources.sequences
+ for s=1,#sequences do
+ local sequence = sequences[s]
+ local sfeatures = sequence.features
+ if sfeatures then
+ local order = sequence.order
+ if order then
+ for i=1,#order do --
+ local feature = order[i]
+ local value = features[feature]
+ if value then
+ local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
+ if not validlookups then
+ -- skip
+ elseif basesubstitutions and basesubstitutions[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value)
+ end
+ preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ elseif basepositionings and basepositionings[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value)
+ end
+ preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ --
+ registerbasehash(tfmdata)
+ end
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname)
+ end
+ end
+end
+
+registerotffeature {
+ name = "features",
+ description = "features",
+ default = true,
+ initializers = {
+ -- position = 1, -- after setscript (temp hack ... we need to force script / language to 1
+ base = featuresinitializer,
+ }
+}
diff --git a/tex/context/base/font-otr.lua b/tex/context/base/font-otr.lua
index ddbe6d271..12775f475 100644
--- a/tex/context/base/font-otr.lua
+++ b/tex/context/base/font-otr.lua
@@ -1305,7 +1305,7 @@ local function checkcmap(f,fontdata,records,platform,encoding,format)
if not reader then
return
end
- -- report("checking cmap: platform %a, encoding %a, format %a",platform,encoding,format)
+ report("checking cmap: platform %a, encoding %a, format %a",platform,encoding,format)
reader(f,fontdata,data)
return true
end
@@ -1360,15 +1360,27 @@ function readers.cmap(f,fontdata,specification)
end
end
--
- checkcmap(f,fontdata,records,3, 1, 4)
- checkcmap(f,fontdata,records,3,10,12)
- -- checkcmap(f,fontdata,records,0, 3, 4)
- -- checkcmap(f,fontdata,records,1, 0, 6)
- checkcmap(f,fontdata,records,0, 5,14)
- -- variantcid = records[0] and records[0][5]
- -- if variantcid then
- -- formatreaders[14](f,fontdata,offset,variantcid[14])
- -- end
+ local ok = false
+ ok = checkcmap(f,fontdata,records,3, 1, 4) or ok
+ ok = checkcmap(f,fontdata,records,3,10,12) or ok
+ ok = checkcmap(f,fontdata,records,0, 3, 4) or ok
+ ok = checkcmap(f,fontdata,records,0, 1, 4) or ok
+ ok = checkcmap(f,fontdata,records,1, 0, 6) or ok
+ -- ok = checkcmap(f,fontdata,records,3, 0, 4) or ok -- maybe
+ -- 1 0 0
+ if not ok then
+ local list = { }
+ for k1, v1 in next, records do
+ for k2, v2 in next, v1 do
+ for k3, v3 in next, v2 do
+ list[#list+1] = formatters["%s.%s.%s"](k1,k2,k3)
+ end
+ end
+ end
+ table.sort(list)
+ report("no unicode cmap record loaded, found tables: % t",list)
+ end
+ checkcmap(f,fontdata,records,0, 5,14) -- variants
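+ -- For the record, the combinations tried above are, in order of preference:
+ -- 3.1 (Windows, Unicode BMP, format 4), 3.10 (Windows, full Unicode, format 12),
+ -- 0.3 and 0.1 (Unicode platform, format 4), 1.0 (Macintosh Roman, format 6) and
+ -- finally 0.5 with format 14 for variation sequences.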
--
fontdata.cidmaps = {
version = version,
@@ -1412,14 +1424,14 @@ function readers.kern(f,fontdata,specification)
local length = readushort(f)
local coverage = readushort(f)
-- bit 8-15 of coverage: format 0 or 2
- local format = bit32.rshift(coverage,8) -- is this ok?
+ local format = bit32.rshift(coverage,8) -- is this ok?
if format == 0 then
local nofpairs = readushort(f)
local searchrange = readushort(f)
local entryselector = readushort(f)
local rangeshift = readushort(f)
- local kerns = { }
- local glyphs = fontdata.glyphs
+ local kerns = { }
+ local glyphs = fontdata.glyphs
for i=1,nofpairs do
local left = readushort(f)
local right = readushort(f)
@@ -1432,7 +1444,6 @@ function readers.kern(f,fontdata,specification)
glyph.kerns = { [right] = kern }
end
end
- -- fontdata.kerns = kerns
elseif format == 2 then
report("todo: kern classes")
else
@@ -1609,7 +1620,7 @@ local function getinfo(maindata,sub)
local weight = getname(fontdata,"weight") or cffinfo.weight or metrics.weight
local width = getname(fontdata,"width") or cffinfo.width or metrics.width
return { -- we inherit some inconsistencies/choices from ff
- subfontindex = sub or 0,
+ subfontindex = fontdata.subfontindex or sub or 0,
-- filename = filename,
-- version = name("version"),
-- format = fontdata.format,
@@ -1767,7 +1778,7 @@ local function loadfontdata(specification)
for i=1,nofsubfonts do
offsets[i] = readulong(f)
end
- if subfont then
+ if subfont then -- a number or not
if subfont >= 1 and subfont <= nofsubfonts then
fontdata = readdata(f,offsets[subfont],specification)
else
@@ -1780,6 +1791,7 @@ local function loadfontdata(specification)
for i=1,nofsubfonts do
fontdata = readdata(f,offsets[i],specification)
if fontdata then
+ fontdata.subfontindex = i
report("subfont named %a has index %a",subfont,i)
break
end
@@ -1873,6 +1885,7 @@ function readers.loadfont(filename,n)
glyphs = true,
shapes = false,
lookups = true,
+ -- kerns = true,
subfont = n,
}
if fontdata then
@@ -1891,16 +1904,15 @@ function readers.loadfont(filename,n)
hasitalics = fontdata.hasitalics or false,
},
resources = {
- duplicates = { }, -- todo
- features = fontdata.features,
filename = fontdata.filename,
- sublookups = fontdata.sublookups,
- subtables = fontdata.subtables,
- marks = fontdata.marks or { },
- markclasses = fontdata.markclasses or { },
- marksets = fontdata.marksets or { },
private = privateoffset,
- sequences = fontdata.sequences,
+ duplicates = { }, -- todo
+ features = fontdata.features or { }, -- we need to add these in the loader
+ sublookups = fontdata.sublookups or { }, -- we need to add these in the loader
+ marks = fontdata.marks or { }, -- we need to add these in the loader
+ markclasses = fontdata.markclasses or { }, -- we need to add these in the loader
+ marksets = fontdata.marksets or { }, -- we need to add these in the loader
+ sequences = fontdata.sequences or { }, -- we need to add these in the loader
variants = fontdata.variants, -- variant -> unicode -> glyph
version = getname(fontdata,"version"),
cidinfo = fontdata.cidinfo,
diff --git a/tex/context/base/font-ots.lua b/tex/context/base/font-ots.lua
new file mode 100644
index 000000000..7a650025a
--- /dev/null
+++ b/tex/context/base/font-ots.lua
@@ -0,0 +1,3104 @@
+if not modules then modules = { } end modules ['font-ots'] = { -- sequences
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This is a version of font-otn.lua adapted to the new font loader code. It
+-- is a context version which can contain experimental code, but when we
+-- have serious patches we will backport to the font-otn files. There will
+-- be a generic variant too.
+
+-- todo: looks like we have a leak somewhere (probably in ligatures)
+-- todo: copy attributes to disc
+
+-- we do some disc juggling where we need to keep in mind that the
+-- pre, post and replace fields can have prev pointers to a nesting
+-- node ... i wonder if that is still needed
+--
+-- not possible:
+--
+-- \discretionary {alpha-} {betagammadelta}
+-- {\discretionary {alphabeta-} {gammadelta}
+-- {\discretionary {alphabetagamma-} {delta}
+-- {alphabetagammadelta}}}
+
+--[[ldx--
+<p>This module is a bit more split up than I'd like but since we also want to test
+with plain <l n='tex'/> it has to be so. This module is part of <l n='context'/>
+and discussion about improvements and functionality mostly happens on the
+<l n='context'/> mailing list.</p>
+
+<p>The specification of OpenType is kind of vague. Apart from the lack of a proper
+free specification there's also the problem that Microsoft and Adobe
+may have their own interpretation of how and in what order to apply features.
+In general the Microsoft website has more detailed specifications and is a
+better reference. There is also some information in the FontForge help files.</p>
+
+<p>Because there is so much possible, fonts might contain bugs and/or be made to
+work with certain renderers. These may evolve over time, which may have the side
+effect that fonts suddenly behave differently.</p>
+
+<p>After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
+implementation. Of course all errors are mine and of course the code can be
+improved. There are quite some optimizations going on here and processing speed
+is currently acceptable. Not all functions are implemented yet, often because I
+lack the fonts for testing. Many scripts are not yet supported either, but I will
+look into them as soon as <l n='context'/> users ask for it.</p>
+
+<p>The specification leaves room for interpretation. In case of doubt the Microsoft
+implementation is the reference as it is the most complete one. As they deal with
+lots of scripts and fonts, Kai and Ivo did a lot of testing of the generic code and
+their suggestions help improve the code. I'm aware that not all border cases can be
+taken care of, unless we accept excessive runtime, and even then the interference
+with other mechanisms (like hyphenation) is not trivial.</p>
+
+<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
+relationship with unicode at all, apart from the fact that a font might cover certain
+ranges of characters. One character can have multiple shapes. However, at the
+<l n='tex'/> end we use unicode and all extra glyphs are mapped into a private
+space. This is needed because we need to access them and <l n='tex'/> has to include
+them in the output eventually.</p>
+
+<p>The initial data table is rather close to the open type specification and also not
+that different from the one produced by <l n='fontforge'/> but we use hashes instead.
+In <l n='context'/> that table is packed (similar tables are shared) and cached on disk
+so that successive runs can use the optimized table (after loading the table is
+unpacked). The flattening code used later is a prelude to an even more compact table
+format (and as such it keeps evolving).</p>
+
+<p>This module is sparsely documented because it is a moving target. The table format
+of the reader changes and we experiment a lot with different methods for supporting
+features.</p>
+
+<p>As with the <l n='afm'/> code, we may decide to store more information in the
+<l n='otf'/> table.</p>
+
+<p>Incrementing the version number will force a re-cache. We jump the number by one
+when there's a fix in the <l n='fontforge'/> library or <l n='lua'/> code that
+results in different tables.</p>
+--ldx]]--
+
+local type, next, tonumber = type, next, tonumber
+local random = math.random
+local formatters = string.formatters
+
+local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
+
+local registertracker = trackers.register
+local registerdirective = directives.register
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
+local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
+local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
+local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
+local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
+local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
+local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
+local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
+local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
+local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
+local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
+local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
+local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
+local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
+local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
+local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
+
+local trace_kernruns = false registertracker("otf.kernruns", function(v) trace_kernruns = v end)
+local trace_discruns = false registertracker("otf.discruns", function(v) trace_discruns = v end)
+local trace_compruns = false registertracker("otf.compruns", function(v) trace_compruns = v end)
+
+local quit_on_no_replacement = true -- maybe per font
+local check_discretionaries = true -- "trace"
+local zwnjruns = true
+
+registerdirective("otf.zwnjruns", function(v) zwnjruns = v end)
+registerdirective("otf.chain.quitonnoreplacement",function(value) quit_on_no_replacement = value end)
+
+local report_direct = logs.reporter("fonts","otf direct")
+local report_subchain = logs.reporter("fonts","otf subchain")
+local report_chain = logs.reporter("fonts","otf chain")
+local report_process = logs.reporter("fonts","otf process")
+local report_prepare = logs.reporter("fonts","otf prepare")
+local report_warning = logs.reporter("fonts","otf warning")
+local report_run = logs.reporter("fonts","otf run")
+
+registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
+registertracker("otf.actions","otf.replacements,otf.positions")
+registertracker("otf.injections","nodes.injections")
+
+registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
+
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local delete_node = nuts.delete
+local remove_node = nuts.remove
+local copy_node = nuts.copy
+local copy_node_list = nuts.copy_list
+local find_node_tail = nuts.tail
+local flush_node_list = nuts.flush_list
+local free_node = nuts.free
+local end_of_math = nuts.end_of_math
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
+
+local setmetatableindex = table.setmetatableindex
+
+local zwnj = 0x200C
+local zwj = 0x200D
+local wildcard = "*"
+local default = "dflt"
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+local glyphcodes = nodes.glyphcodes
+local disccodes = nodes.disccodes
+
+local glyph_code = nodecodes.glyph
+local glue_code = nodecodes.glue
+local disc_code = nodecodes.disc
+local whatsit_code = nodecodes.whatsit
+local math_code = nodecodes.math
+
+local dir_code = whatcodes.dir
+local localpar_code = whatcodes.localpar
+
+local discretionary_code = disccodes.discretionary
+local regular_code = disccodes.regular
+local automatic_code = disccodes.automatic
+
+local ligature_code = glyphcodes.ligature
+
+local privateattribute = attributes.private
+
+-- Something is messed up: we have two mark / ligature indices, one at the injection
+-- end and one here ... this is based on KE's patches but there is something fishy
+-- there as I'm pretty sure that for husayni we need some connection (as it's much
+-- more complex than an average font) but I need proper examples of all cases, not
+-- of only some.
+
+local a_state = privateattribute('state')
+local a_cursbase = privateattribute('cursbase') -- to be checked, probably can go
+
+local injections = nodes.injections
+local setmark = injections.setmark
+local setcursive = injections.setcursive
+local setkern = injections.setkern
+local setpair = injections.setpair
+local resetinjection = injections.reset
+local copyinjection = injections.copy
+local setligaindex = injections.setligaindex
+local getligaindex = injections.getligaindex
+
+local cursonce = true
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local onetimemessage = fonts.loggers.onetimemessage or function() end
+
+otf.defaultnodealternate = "none" -- first last
+
+local handlers = { }
+
+-- We use a few global variables. The handler can be called nested but this assumes that the
+-- same font is used. Nested calls are normally not needed (only for devanagari).
+
+local tfmdata = false
+local characters = false
+local descriptions = false
+local marks = false
+local currentfont = false
+local factor = 0
+
+-- head is always a whatsit so we can safely assume that head is not changed
+
+-- handlers .whatever(head,start, dataset,sequence,kerns, step,i,injection)
+-- chainprocs.whatever(head,start,stop,dataset,sequence,currentlookup,chainindex)
+
+-- we use this for special testing and documentation
+
+local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
+end
+
+local function logwarning(...)
+ report_direct(...)
+end
+
+local f_unicode = formatters["%U"]
+local f_uniname = formatters["%U (%s)"]
+local f_unilist = formatters["% t (% t)"]
+
+local function gref(n) -- currently the same as in font-otb
+ if type(n) == "number" then
+ local description = descriptions[n]
+ local name = description and description.name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num, nam = { }, { }
+ for i=1,#n do
+ local ni = n[i]
+ if tonumber(ni) then -- later we will start at 2
+ local di = descriptions[ni]
+ num[i] = f_unicode(ni)
+ nam[i] = di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in node mode tracing>"
+ end
+end
+
+local function cref(dataset,sequence,index)
+ if index then
+ return formatters["feature %a, type %a, chain lookup %a, index %a"](dataset[4],sequence.type,sequence.name,index)
+ else
+ return formatters["feature %a, type %a, chain lookup %a"](dataset[4],sequence.type,sequence.name)
+ end
+end
+
+local function pref(dataset,sequence)
+ return formatters["feature %a, type %a, lookup %a"](dataset[4],sequence.type,sequence.name)
+end
+
+local function mref(rlmode)
+ if not rlmode or rlmode == 0 then
+ return "---"
+ elseif rlmode < 0 then
+ return "r2l"
+ else
+ return "l2r"
+ end
+end
+
+-- We can assume that languages that use marks are not hyphenated. We can also assume
+-- that at most one discretionary is present.
+
+-- We do need components in funny kerning mode but maybe I can better reconstruct them
+-- as we do have the font components info available; removing components makes the
+-- previous code much simpler. Also, later on copying and freeing becomes easier.
+-- However, for arabic we need to keep them around for the sake of mark placement
+-- and indices.
+
+local function copy_glyph(g) -- next and prev are untouched !
+ local components = getfield(g,"components")
+ if components then
+ setfield(g,"components",nil)
+ local n = copy_node(g)
+ copyinjection(n,g) -- we need to preserve the lig indices
+ setfield(g,"components",components)
+ return n
+ else
+ local n = copy_node(g)
+ copyinjection(n,g) -- we need to preserve the lig indices
+ return n
+ end
+end
+
+-- temp here (context)
+
+local function collapse_disc(start,next)
+ local replace1 = getfield(start,"replace")
+ local replace2 = getfield(next,"replace")
+ if replace1 and replace2 then
+ local pre2 = getfield(next,"pre")
+ local post2 = getfield(next,"post")
+ setfield(replace1,"prev",nil)
+ if pre2 then
+ local pre1 = getfield(start,"pre")
+ if pre1 then
+ flush_node_list(pre1)
+ end
+ local pre1 = copy_node_list(replace1)
+ local tail1 = find_node_tail(pre1)
+ setfield(tail1,"next",pre2)
+ setfield(pre2,"prev",tail1)
+ setfield(start,"pre",pre1)
+ setfield(next,"pre",nil)
+ else
+ setfield(start,"pre",nil)
+ end
+ if post2 then
+ local post1 = getfield(start,"post")
+ if post1 then
+ flush_node_list(post1)
+ end
+ setfield(start,"post",post2)
+ else
+ setfield(start,"post",nil)
+ end
+ local tail1 = find_node_tail(replace1)
+ setfield(tail1,"next",replace2)
+ setfield(replace2,"prev",tail1)
+ setfield(start,"replace",replace1)
+ setfield(next,"replace",nil)
+ --
+ local nextnext = getnext(next)
+ setfield(nextnext,"prev",start)
+ setfield(start,"next",nextnext)
+ free_node(next)
+ else
+ -- maybe remove it
+ end
+end
+
+-- start is a mark and we need to keep that one
+
+local function markstoligature(head,start,stop,char)
+ if start == stop and getchar(start) == char then
+ return head, start
+ else
+ local prev = getprev(start)
+ local next = getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
+ local base = copy_glyph(start)
+ if head == start then
+ head = base
+ end
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
+ if prev then
+ setfield(prev,"next",base)
+ end
+ if next then
+ setfield(next,"prev",base)
+ end
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
+ return head, base
+ end
+end
+
+-- The next code is somewhat complicated by the fact that some fonts can have ligatures made
+-- from ligatures that themselves have marks. This was identified by Kai in for instance
+-- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes
+-- KAF LAM-ALEF with a SHADDA on the first and a FATHA on the second component. In a next
+-- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the
+-- third component.
+
+local function getcomponentindex(start)
+ if getid(start) ~= glyph_code then
+ return 0
+ elseif getsubtype(start) == ligature_code then
+ local i = 0
+ local components = getfield(start,"components")
+ while components do
+ i = i + getcomponentindex(components)
+ components = getnext(components)
+ end
+ return i
+ elseif not marks[getchar(start)] then
+ return 1
+ else
+ return 0
+ end
+end
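+
+-- A made up illustration (the nodes here are hypothetical) of what the index
+-- counting above yields:
+--
+-- getcomponentindex(f)      -- 1 (a plain non-mark glyph)
+-- getcomponentindex(mark)   -- 0 (marks don't count)
+-- getcomponentindex(ff_lig) -- 2 (a ligature counts its non-mark components)
+--
+-- which is what keeps the ligature indices of marks pointing at the right
+-- component when ligatures are built from ligatures.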
+
+local a_noligature = attributes.private("noligature")
+local prehyphenchar = languages and languages.prehyphenchar
+local posthyphenchar = languages and languages.posthyphenchar
+----- preexhyphenchar = languages and languages.preexhyphenchar
+----- postexhyphenchar = languages and languages.postexhyphenchar
+
+if prehyphenchar then
+
+ -- okay
+
+elseif context then
+
+ report_warning("no language support") os.exit()
+
+else
+
+ local newlang = lang.new
+ local getpre = lang.prehyphenchar
+ local getpost = lang.posthyphenchar
+ -- local getpreex = lang.preexhyphenchar
+ -- local getpostex = lang.postexhyphenchar
+
+ prehyphenchar = function(l) local l = newlang(l) return l and getpre (l) or -1 end
+ posthyphenchar = function(l) local l = newlang(l) return l and getpost (l) or -1 end
+ -- preexhyphenchar = function(l) local l = newlang(l) return l and getpreex (l) or -1 end
+ -- postexhyphenchar = function(l) local l = newlang(l) return l and getpostex(l) or -1 end
+
+end
+
+local function addhyphens(template,pre,post)
+ -- inserted by hyphenation algorithm
+ local l = getfield(template,"lang")
+ local p = prehyphenchar(l)
+ if p and p > 0 then
+ local c = copy_node(template)
+ setfield(c,"char",p)
+ if pre then
+ local t = find_node_tail(pre)
+ setfield(t,"next",c)
+ setfield(c,"prev",t)
+ else
+ pre = c
+ end
+ end
+ local p = posthyphenchar(l)
+ if p and p > 0 then
+ local c = copy_node(template)
+ setfield(c,"char",p)
+ if post then
+ -- post has a prev nesting node .. alternatively we could
+ local prev = getprev(post)
+ setfield(c,"next",post)
+ setfield(post,"prev",c)
+ if prev then
+ setfield(prev,"next",c)
+ setfield(c,"prev",prev)
+ end
+ else
+ post = c
+ end
+ end
+ return pre, post
+end
+
+local function toligature(head,start,stop,char,dataset,sequence,markflag,discfound) -- brr head
+ if getattr(start,a_noligature) == 1 then
+ -- so we can do: e\noligature{ff}e e\noligature{f}fie (we only look at the first)
+ return head, start
+ end
+ if start == stop and getchar(start) == char then
+ resetinjection(start)
+ setfield(start,"char",char)
+ return head, start
+ end
+ -- needs testing (side effects):
+ local components = getfield(base,"components")
+ if components then
+ flush_node_list(components)
+ end
+ --
+ local prev = getprev(start)
+ local next = getnext(stop)
+ local comp = start
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
+ local base = copy_glyph(start)
+ if start == head then
+ head = base
+ end
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",comp) -- start can have components .. do we need to flush?
+ if prev then
+ setfield(prev,"next",base)
+ end
+ if next then
+ setfield(next,"prev",base)
+ end
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
+ if not discfound then
+ local deletemarks = markflag ~= "mark"
+ local components = start
+ local baseindex = 0
+ local componentindex = 0
+ local head = base
+ local current = base
+ -- first we loop over the glyphs in start .. stop
+ while start do
+ local char = getchar(start)
+ if not marks[char] then
+ baseindex = baseindex + componentindex
+ componentindex = getcomponentindex(start)
+ elseif not deletemarks then -- quite fishy
+ setligaindex(start,baseindex + getligaindex(start,componentindex))
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(dataset,sequence),gref(char),getligaindex(start))
+ end
+ local n = copy_node(start)
+ copyinjection(n,start)
+ head, current = insert_node_after(head,current,n) -- unlikely that mark has components
+ elseif trace_marks then
+ logwarning("%s: delete mark %s",pref(dataset,sequence),gref(char))
+ end
+ start = getnext(start)
+ end
+ -- we can have one accent as part of a lookup and another following
+ -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added)
+ local start = getnext(current)
+ while start and getid(start) == glyph_code do
+ local char = getchar(start)
+ if marks[char] then
+ setligaindex(start,baseindex + getligaindex(start,componentindex))
+ if trace_marks then
+ logwarning("%s: set mark %s, gets index %s",pref(dataset,sequence),gref(char),getligaindex(start))
+ end
+ else
+ break
+ end
+ start = getnext(start)
+ end
+ else
+ -- discfound ... forget about marks .. probably no scripts that hyphenate and have marks
+ local discprev = getfield(discfound,"prev")
+ local discnext = getfield(discfound,"next")
+ if discprev and discnext then
+ local subtype = getsubtype(discfound)
+ if subtype == discretionary_code then
+ local pre = getfield(discfound,"pre")
+ local post = getfield(discfound,"post")
+ local replace = getfield(discfound,"replace")
+ if not replace then -- todo: signal simple hyphen
+ local prev = getfield(base,"prev")
+ local copied = copy_node_list(comp)
+ setfield(discnext,"prev",nil) -- also blocks funny assignments
+ setfield(discprev,"next",nil) -- also blocks funny assignments
+ if pre then
+ setfield(comp,"next",pre)
+ setfield(pre,"prev",comp)
+ end
+ pre = comp
+ if post then
+ local tail = find_node_tail(post)
+ setfield(tail,"next",discnext)
+ setfield(discnext,"prev",tail)
+ setfield(post,"prev",nil)
+ else
+ post = discnext
+ end
+ setfield(prev,"next",discfound)
+ setfield(next,"prev",discfound)
+ setfield(discfound,"next",next)
+ setfield(discfound,"prev",prev)
+ setfield(base,"next",nil)
+ setfield(base,"prev",nil)
+ setfield(base,"components",copied)
+ setfield(discfound,"pre",pre)
+ setfield(discfound,"post",post)
+ setfield(discfound,"replace",base)
+ setfield(discfound,"subtype",discretionary_code)
+ base = prev -- restart
+ end
+ elseif subtype == regular_code then
+ -- local prev = getfield(base,"prev")
+ -- local next = getfield(base,"next")
+ local copied = copy_node_list(comp)
+ setfield(discnext,"prev",nil) -- also blocks funny assignments
+ setfield(discprev,"next",nil) -- also blocks funny assignments
+ local pre, post = addhyphens(comp,comp,discnext,subtype) -- takes from components
+ setfield(prev,"next",discfound)
+ setfield(next,"prev",discfound)
+ setfield(discfound,"next",next)
+ setfield(discfound,"prev",prev)
+ setfield(base,"next",nil)
+ setfield(base,"prev",nil)
+ setfield(base,"components",copied)
+ setfield(discfound,"pre",pre)
+ setfield(discfound,"post",post)
+ setfield(discfound,"replace",base)
+ setfield(discfound,"subtype",discretionary_code)
+ base = next -- or restart
+ else
+ -- forget about it in generic usage
+ end
+ end
+ end
+ return head, base
+end
+
+local function multiple_glyphs(head,start,multiple,ignoremarks)
+ local nofmultiples = #multiple
+ if nofmultiples > 0 then
+ resetinjection(start)
+ setfield(start,"char",multiple[1])
+ if nofmultiples > 1 then
+ local sn = getnext(start)
+ for k=2,nofmultiples do -- todo: use insert_node
+-- untested:
+--
+-- while ignoremarks and marks[getchar(sn)] do
+-- sn = getnext(sn)
+-- end
+ local n = copy_node(start) -- ignore components
+ resetinjection(n)
+ setfield(n,"char",multiple[k])
+ setfield(n,"next",sn)
+ setfield(n,"prev",start)
+ if sn then
+ setfield(sn,"prev",n)
+ end
+ setfield(start,"next",n)
+ start = n
+ end
+ end
+ return head, start, true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(getchar(start)))
+ end
+ return head, start, false
+ end
+end
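+
+-- For reference (values made up): a gsub_multiple replacement is just a list of
+-- unicodes, so a one to three decomposition looks like
+--
+-- multiple = { 0x0041, 0x0301, 0x0300 }
+--
+-- and the first slot reuses the original node while the rest become copies.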
+
+local function get_alternative_glyph(start,alternatives,value)
+ local n = #alternatives
+ if value == "random" then
+ local r = random(1,n)
+ return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r)
+ elseif value == "first" then
+ return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1)
+ elseif value == "last" then
+ return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n)
+ else
+ value = value == true and 1 or tonumber(value)
+ if type(value) ~= "number" then
+ return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif value > n then
+ local defaultalt = otf.defaultnodealternate
+ if defaultalt == "first" then
+ return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif defaultalt == "last" then
+ return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
+ else
+ return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
+ end
+ elseif value == 0 then
+ return getchar(start), trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value < 1 then
+ return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ else
+ return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value)
+ end
+ end
+end
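+
+-- Some illustrative calls (the alternatives table is made up); the first return
+-- value is the chosen glyph:
+--
+-- local alternatives = { 0xE001, 0xE002, 0xE003 }
+-- get_alternative_glyph(start,alternatives,"random") -- one of the three
+-- get_alternative_glyph(start,alternatives,"first")  -- 0xE001
+-- get_alternative_glyph(start,alternatives,2)        -- 0xE002
+-- get_alternative_glyph(start,alternatives,9)        -- out of range, falls back on defaultnodealternate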
+
+-- handlers
+
+function handlers.gsub_single(head,start,dataset,sequence,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(dataset,sequence),gref(getchar(start)),gref(replacement))
+ end
+ resetinjection(start)
+ setfield(start,"char",replacement)
+ return head, start, true
+end
+
+function handlers.gsub_alternate(head,start,dataset,sequence,alternative)
+ local kind = dataset[4]
+ local what = dataset[1]
+ local value = what == true and tfmdata.shared.features[kind] or what
+ local choice, comment = get_alternative_glyph(start,alternative,value)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(dataset,sequence),gref(getchar(start)),gref(choice),comment)
+ end
+ resetinjection(start)
+ setfield(start,"char",choice)
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",pref(dataset,sequence),value,gref(getchar(start)),comment)
+ end
+ end
+ return head, start, true
+end
+
+function handlers.gsub_multiple(head,start,dataset,sequence,multiple)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(dataset,sequence),gref(getchar(start)),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple,sequence.flags[1])
+end
+
+function handlers.gsub_ligature(head,start,dataset,sequence,ligature)
+ local current = getnext(start)
+ local stop = nil
+ local startchar = getchar(start)
+ if marks[startchar] then
+ while current do
+ local id = getid(current)
+ if id == glyph_code and getfont(current) == currentfont and getsubtype(current)<256 then
+ local lg = ligature[getchar(current)]
+ if lg then
+ stop = current
+ ligature = lg
+ current = getnext(current)
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig = ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar = getchar(stop)
+ head, start = markstoligature(head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(dataset,sequence),gref(startchar),gref(stopchar),gref(getchar(start)))
+ else
+ head, start = markstoligature(head,start,stop,lig)
+ end
+ return head, start, true, false
+ else
+ -- ok, goto next lookup
+ end
+ end
+ else
+ local skipmark = sequence.flags[1]
+ local discfound = false
+ local lastdisc = nil
+ while current do
+ local id = getid(current)
+ if id == glyph_code and getsubtype(current)<256 then -- not needed
+ if getfont(current) == currentfont then -- also not needed only when mark
+ local char = getchar(current)
+ if skipmark and marks[char] then
+ current = getnext(current)
+ else -- ligature is a tree
+ local lg = ligature[char] -- can there be multiple in a row? maybe in a bad font
+ if lg then
+ if not discfound and lastdisc then
+ discfound = lastdisc
+ lastdisc = nil
+ end
+ stop = current -- needed for fake so outside then
+ ligature = lg
+ current = getnext(current)
+ else
+ break
+ end
+ end
+ else
+ break
+ end
+ elseif id == disc_code then
+ lastdisc = current
+ current = getnext(current)
+ else
+ break
+ end
+ end
+ local lig = ligature.ligature
+ if lig then
+ if stop then
+ if trace_ligatures then
+ local stopchar = getchar(stop)
+ head, start = toligature(head,start,stop,lig,dataset,sequence,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(dataset,sequence),gref(startchar),gref(stopchar),gref(lig))
+ else
+ head, start = toligature(head,start,stop,lig,dataset,sequence,skipmark,discfound)
+ end
+ else
+ -- weird but happens (in some arabic font)
+ resetinjection(start)
+ setfield(start,"char",lig)
+ if trace_ligatures then
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(dataset,sequence),gref(startchar),gref(lig))
+ end
+ end
+ return head, start, true, discfound
+ else
+ -- weird but happens, pseudo ligatures ... just the components
+ end
+ end
+ return head, start, false, discfound
+end
+
+-- todo: have this one directly (all are pair now)
+
+function handlers.gpos_single(head,start,dataset,sequence,kerns,rlmode,step,i,injection)
+ local startchar = getchar(start)
+ if step.format == "pair" then
+ local dx, dy, w, h = setpair(start,factor,rlmode,sequence.flags[4],kerns,injection)
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(dataset,sequence),gref(startchar),dx,dy,w,h)
+ end
+ else
+ -- needs checking .. maybe no kerns format for single
+ local k = setkern(start,factor,rlmode,kerns,injection)
+ if trace_kerns then
+ logprocess("%s: shifting single %s by %p",pref(dataset,sequence),gref(startchar),k)
+ end
+ end
+ return head, start, false
+end
+
+function handlers.gpos_pair(head,start,dataset,sequence,kerns,rlmode,step,i,injection)
+ local snext = getnext(start)
+ if not snext then
+ return head, start, false
+ else
+ local prev = start
+ local done = false
+ while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
+ local nextchar = getchar(snext)
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = getnext(snext)
+ elseif not krn then
+ break
+ elseif step.format == "pair" then
+ local a, b = krn[1], krn[2]
+ if a and #a > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,injection) -- characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,injection) -- characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ done = true
+ break
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn,injection)
+ if trace_kerns then
+ logprocess("%s: inserting kern %p between %s and %s",pref(dataset,sequence),k,gref(getchar(prev)),gref(nextchar))
+ end
+ done = true
+ break
+ end
+ end
+ return head, start, done
+ end
+end
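+
+-- As used above, a pair coverage entry maps the second character either to a
+-- plain kern or, in "pair" format, to two positioning value records; roughly
+-- (placeholders, not actual data):
+--
+-- kerns[nextchar] = -120              -- a plain kern (number)
+-- kerns[nextchar] = { first, second } -- value records for the first and second glyph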
+
+--[[ldx--
+<p>We get hits on a mark, but we're not sure if it has to be applied, so
+we need to explicitly test for basechar, baselig and basemark entries.</p>
+--ldx]]--
+
+-- can we share with chains if we have a stop == nil ?
+
+function handlers.gpos_mark2base(head,start,dataset,sequence,markanchors,rlmode)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local base = getprev(start) -- [glyph] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
+ if marks[basechar] then
+ while true do
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(dataset,sequence),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local ba = markanchors[1][basechar]
+ if ba then
+ local ma = markanchors[2]
+ local dx, dy, bound = setmark(start,base,factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(dataset,sequence))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(dataset,sequence),gref(markchar))
+ end
+ return head, start, false
+end
+
+-- ONCE CHECK HERE?
+
+function handlers.gpos_mark2ligature(head,start,dataset,sequence,markanchors,rlmode)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local base = getprev(start) -- [glyph] [optional marks] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
+ if marks[basechar] then
+ while true do
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(dataset,sequence),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local ba = markanchors[1][basechar]
+ if ba then
+ local ma = markanchors[2]
+ if ma then
+ local index = getligaindex(start)
+ ba = ba[index]
+ if ba then
+ local dx, dy, bound = setmark(start,base,factor,rlmode,ba,ma,characters[basechar]) -- index
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(dataset,sequence),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head, start, true
+ else
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(dataset,sequence),gref(markchar),gref(basechar),index)
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(dataset,sequence),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(dataset,sequence))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(dataset,sequence),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_mark2mark(head,start,dataset,sequence,markanchors,rlmode)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local base = getprev(start) -- [glyph] [basemark] [start=mark]
+ local slc = getligaindex(start)
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = getligaindex(base)
+ if blc and blc ~= slc then
+ base = getprev(base)
+ else
+ break
+ end
+ end
+ end
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
+ local basechar = getchar(base)
+ local ba = markanchors[1][basechar] -- slot 1 has been made copy of the class hash
+ if ba then
+ local ma = markanchors[2]
+ local dx, dy, bound = setmark(start,base,factor,rlmode,ba,ma,characters[basechar],true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(dataset,sequence),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_cursive(head,start,dataset,sequence,exitanchors,rlmode,step,i) -- to be checked
+ local alreadydone = cursonce and getprop(start,a_cursbase)
+ if not alreadydone then
+ local done = false
+ local startchar = getchar(start)
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(dataset,sequence),gref(startchar))
+ end
+ else
+ local nxt = getnext(start)
+ while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
+ local nextchar = getchar(nxt)
+ if marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = getnext(nxt)
+ else
+ local exit = exitanchors[3]
+ if exit then
+ local entry = exitanchors[1][nextchar]
+ if entry then
+ entry = entry[2]
+ if entry then
+ local dx, dy, bound = setcursive(start,nxt,factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in %s mode",pref(dataset,sequence),gref(startchar),gref(nextchar),dx,dy,anchor,bound,mref(rlmode))
+ end
+ done = true
+ end
+ end
+ end
+ break
+ end
+ end
+ end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(dataset,sequence),gref(getchar(start)),alreadydone)
+ end
+ return head, start, false
+ end
+end
+
+--[[ldx--
+<p>I will implement multiple chain replacements once I run into a font that uses
+it. It's not that complex to handle.</p>
+--ldx]]--
+
+local chainprocs = { }
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
+end
+
+local logwarning = report_subchain
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
+end
+
+local logwarning = report_chain
+
+-- We could share functions but that would lead to extra function calls with many
+-- arguments, redundant tests and confusing messages.
+
+-- The reversesub is a special case, which is why we need to store the replacements
+-- in a somewhat odd way. There is no separate lookup: the replacement comes with the
+-- rule itself. It is meant mostly for dealing with Urdu.
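+-- Layout sketch (illustration only): the replacements table used below is a plain
+-- character map that comes straight from the rule, e.g. { [char] = replacementchar }.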
+
+function chainprocs.reversesub(head,start,stop,dataset,sequence,replacements,rlmode)
+ local char = getchar(start)
+ local replacement = replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(dataset,sequence),gref(char),gref(replacement))
+ end
+ resetinjection(start)
+ setfield(start,"char",replacement)
+ return head, start, true
+ else
+ return head, start, false
+ end
+end
+
+--[[ldx--
+<p>This chain stuff is somewhat tricky since we can have a sequence of actions to be
+applied: single, alternate, multiple or ligature, where the ligature can be an invalid
+one in the sense that it will replace multiple glyphs by one but not necessarily one
+that looks like the combination (i.e. it is then the counterpart of multiple). For
+example, the following is valid:</p>
+
+<typing>
+<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
+</typing>
+
+<p>Therefore we don't really do the replacement here yet unless we have the
+single lookup case. The efficiency of the replacements can be improved by deleting
+as little as needed, but that would also make the code even more messy.</p>
+--ldx]]--
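+
+-- Spelled out (an illustration of the example above, assuming three chained steps):
+--
+--   step 1, single   : a   -> A
+--   step 2, multiple : b   -> B C D
+--   step 3, ligature : cde -> E
+--
+-- so "xxxabcdexxx" ends up as "xxxABCDExxx"; the actual replacements are performed by
+-- the chainprocs defined below.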
+
+-- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start
+-- local n = 1
+-- if start == stop then
+-- -- done
+-- elseif ignoremarks then
+-- repeat -- start x x m x x stop => start m
+-- local next = getnext(start)
+-- if not marks[getchar(next)] then
+-- local components = getfield(next,"components")
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- end
+-- n = n + 1
+-- until next == stop
+-- else -- start x x x stop => start
+-- repeat
+-- local next = getnext(start)
+-- local components = getfield(next,"components")
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- n = n + 1
+-- until next == stop
+-- end
+-- return head, n
+-- end
+
+--[[ldx--
+<p>Here we replace start by a single variant.</p>
+--ldx]]--
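+
+-- Coverage sketch for the single case (assumed layout, matching the indexing below):
+-- a direct character to character map, e.g.
+--
+--   steps[1].coverage = { [char] = replacementchar }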
+
+-- To be done (example needed): what if there is more than one step?
+
+-- this is messy: do we need this disc checking also in alternates?
+
+local function reportmoresteps(dataset,sequence)
+ logwarning("%s: more than 1 step",cref(dataset,sequence))
+end
+
+function chainprocs.gsub_single(head,start,stop,dataset,sequence,currentlookup,chainindex)
+ local steps = currentlookup.steps
+ local nofsteps = currentlookup.nofsteps
+ if nofsteps > 1 then
+ reportmoresteps(dataset,sequence)
+ end
+ local current = start
+ while current do
+ if getid(current) == glyph_code then
+ local currentchar = getchar(current)
+ local replacement = steps[1].coverage[currentchar]
+ if not replacement or replacement == "" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(dataset,sequence,chainindex),gref(currentchar))
+ end
+ else
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(dataset,sequence,chainindex),gref(currentchar),gref(replacement))
+ end
+ resetinjection(current)
+ if check_discretionaries then
+ -- some fonts use a chain lookup to replace e.g. an f in a fi ligature
+ -- and there can be a disc node in between ... the next code tries to catch
+ -- this
+ local next = getnext(current)
+ local prev = getprev(current) -- todo: just remember it above
+ local done = false
+ if next then
+ if getid(next) == disc_code then
+ local subtype = getsubtype(next)
+ if subtype == discretionary_code then
+ setfield(next,"prev",prev)
+ setfield(prev,"next",next)
+ setfield(current,"prev",nil)
+ setfield(current,"next",nil)
+ local replace = getfield(next,"replace")
+ local pre = getfield(next,"pre")
+ local new = copy_node(current)
+ setfield(new,"char",replacement)
+ if replace then
+ setfield(new,"next",replace)
+ setfield(replace,"prev",new)
+ end
+ if pre then
+ setfield(current,"next",pre)
+ setfield(pre,"prev",current)
+ end
+ setfield(next,"replace",new) -- also updates tail
+ setfield(next,"pre",current) -- also updates tail
+ end
+ start = next
+ done = true
+ local next = getnext(start)
+ if next and getid(next) == disc_code then
+ collapse_disc(start,next)
+ end
+ end
+ end
+ if not done and prev then
+ if getid(prev) == disc_code then
+ local subtype = getsubtype(prev)
+ if subtype == discretionary_code then
+ setfield(next,"prev",prev)
+ setfield(prev,"next",next)
+ setfield(current,"prev",nil)
+ setfield(current,"next",nil)
+ local replace = getfield(prev,"replace")
+ local post = getfield(prev,"post")
+ local new = copy_node(current)
+ setfield(new,"char",replacement)
+ if replace then
+ local tail = find_node_tail(replace)
+ setfield(tail,"next",new)
+ setfield(new,"prev",tail)
+ else
+ replace = new
+ end
+ if post then
+ local tail = find_node_tail(post)
+ setfield(tail,"next",current)
+ setfield(current,"prev",tail)
+ else
+ post = current
+ end
+ setfield(prev,"replace",replace) -- also updates tail
+ setfield(prev,"post",post) -- also updates tail
+ start = prev
+ done = true
+ end
+ end
+ end
+ if not done then
+ setfield(current,"char",replacement)
+ end
+ else
+ setfield(current,"char",replacement)
+ end
+ end
+ return head, start, true
+ elseif current == stop then
+ break
+ else
+ current = getnext(current)
+ end
+ end
+ return head, start, false
+end
+
+--[[ldx--
+<p>Here we replace start by a sequence of new glyphs.</p>
+--ldx]]--
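+
+-- Coverage sketch for the multiple case (assumed layout): one character maps onto the
+-- list of replacement characters that multiple_glyphs will insert, e.g.
+--
+--   steps[1].coverage = { [char] = { first, second, third } }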
+
+-- disc?
+
+function chainprocs.gsub_multiple(head,start,stop,dataset,sequence,currentlookup)
+ local steps = currentlookup.steps
+ local nofsteps = currentlookup.nofsteps
+ if nofsteps > 1 then
+ reportmoresteps(dataset,sequence)
+ end
+ local startchar = getchar(start)
+ local replacement = steps[1].coverage[startchar]
+ if not replacement or replacement == "" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(dataset,sequence),gref(startchar))
+ end
+ else
+ if trace_multiples then
+            logprocess("%s: replacing %s by multiple characters %s",cref(dataset,sequence),gref(startchar),gref(replacement))
+ end
+ return multiple_glyphs(head,start,replacement,currentlookup.flags[1]) -- not sequence.flags?
+ end
+ return head, start, false
+end
+
+--[[ldx--
+<p>Here we replace start by new glyph. First we delete the rest of the match.</p>
+--ldx]]--
+
+-- char_1 mark_1 -> char_x mark_1 (ignore marks)
+-- char_1 mark_1 -> char_x
+
+-- to be checked: do we always have just one glyph?
+-- we can also have alternates for marks
+-- marks come last anyway
+-- are there cases where we need to delete the mark
+
+-- maybe we can share them ...
+
+-- disc ?
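+
+-- Coverage sketch for the alternate case (assumed layout): each character maps onto a
+-- list of alternatives and the feature value selects one of them, e.g.
+--
+--   steps[1].coverage = { [char] = { alternative_1, alternative_2 } }
+--
+-- get_alternative_glyph turns the value into a concrete choice plus a comment string.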
+
+function chainprocs.gsub_alternate(head,start,stop,dataset,sequence,currentlookup)
+ local steps = currentlookup.steps
+ local nofsteps = currentlookup.nofsteps
+ if nofsteps > 1 then
+ reportmoresteps(dataset,sequence)
+ end
+ local kind = dataset[4]
+ local what = dataset[1]
+ local value = what == true and tfmdata.shared.features[kind] or what
+ local current = start
+ while current do
+ if getid(current) == glyph_code then -- is this check needed?
+ local currentchar = getchar(current)
+ local alternatives = steps[1].coverage[currentchar]
+ if alternatives then
+ local choice, comment = get_alternative_glyph(current,alternatives,value)
+ if choice then
+ if trace_alternatives then
+                        logprocess("%s: replacing %s by alternative %a to %s, %s",cref(dataset,sequence),gref(currentchar),choice,gref(choice),comment)
+ end
+ resetinjection(start)
+ setfield(start,"char",choice)
+ else
+ if trace_alternatives then
+                        logwarning("%s: no variant %a for %s, %s",cref(dataset,sequence),value,gref(currentchar),comment)
+ end
+ end
+ end
+ return head, start, true
+ elseif current == stop then
+ break
+ else
+ current = getnext(current)
+ end
+ end
+ return head, start, false
+end
+
+--[[ldx--
+<p>When we replace ligatures we use a helper that handles the marks. I might change
+this function (move code inline and handle the marks in a separate function). We
+assume rather simple ligatures (no complex disc nodes).</p>
+--ldx]]--
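+
+-- Coverage sketch for the ligature case (assumed layout): a nested tree that the loop
+-- below walks character by character, the final glyph sitting in a .ligature field;
+-- for a hypothetical f/fi/ffi setup:
+--
+--   steps[1].coverage = {
+--       [f] = {
+--           [f] = { [i] = { ligature = ffi } },
+--           [i] = { ligature = fi },
+--       },
+--   }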
+
+function chainprocs.gsub_ligature(head,start,stop,dataset,sequence,currentlookup,chainindex)
+ local steps = currentlookup.steps
+ local nofsteps = currentlookup.nofsteps
+ if nofsteps > 1 then
+ reportmoresteps(dataset,sequence)
+ end
+ local startchar = getchar(start)
+ local ligatures = steps[1].coverage[startchar]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(dataset,sequence,chainindex),gref(startchar))
+ end
+ else
+ local current = getnext(start)
+ local discfound = false
+ local last = stop
+ local nofreplacements = 1
+ local skipmark = currentlookup.flags[1] -- sequence.flags?
+ while current do
+ local id = getid(current)
+ if id == disc_code then
+ if not discfound then
+ discfound = current
+ end
+ if current == stop then
+ break -- okay? or before the disc
+ else
+ current = getnext(current)
+ end
+ else
+ local schar = getchar(current)
+ if skipmark and marks[schar] then -- marks
+ -- if current == stop then -- maybe add this
+ -- break
+ -- else
+ current = getnext(current)
+ -- end
+ else
+ local lg = ligatures[schar]
+ if lg then
+ ligatures = lg
+ last = current
+ nofreplacements = nofreplacements + 1
+ if current == stop then
+ break
+ else
+ current = getnext(current)
+ end
+ else
+ break
+ end
+ end
+ end
+ end
+ local ligature = ligatures.ligature
+ if ligature then
+ if chainindex then
+ stop = last
+ end
+ if trace_ligatures then
+ if start == stop then
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(dataset,sequence,chainindex),gref(startchar),gref(ligature))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(dataset,sequence,chainindex),gref(startchar),gref(getchar(stop)),gref(ligature))
+ end
+ end
+ head, start = toligature(head,start,stop,ligature,dataset,sequence,skipmark,discfound)
+ return head, start, true, nofreplacements, discfound
+ elseif trace_bugs then
+ if start == stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(dataset,sequence,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(dataset,sequence,chainindex),gref(startchar),gref(getchar(stop)))
+ end
+ end
+ end
+ return head, start, false, 0, false
+end
+
+function chainprocs.gpos_single(head,start,stop,dataset,sequence,currentlookup,rlmode,chainindex)
+ local steps = currentlookup.steps
+ local nofsteps = currentlookup.nofsteps
+ if nofsteps > 1 then
+ reportmoresteps(dataset,sequence)
+ end
+ local startchar = getchar(start)
+ local step = steps[1]
+ local kerns = step.coverage[startchar]
+ if not kerns then
+ -- skip
+ elseif step.format == "pair" then
+ local dx, dy, w, h = setpair(start,factor,rlmode,sequence.flags[4],kerns) -- currentlookup.flags ?
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(dataset,sequence),gref(startchar),dx,dy,w,h)
+ end
+ else -- needs checking .. maybe no kerns format for single
+ local k = setkern(start,factor,rlmode,kerns,injection)
+ if trace_kerns then
+ logprocess("%s: shifting single %s by %p",cref(dataset,sequence),gref(startchar),k)
+ end
+ end
+ return head, start, false
+end
+
+-- when machines become faster I will make a shared function
+
+function chainprocs.gpos_pair(head,start,stop,dataset,sequence,currentlookup,rlmode,chainindex)
+ local steps = currentlookup.steps
+ local nofsteps = currentlookup.nofsteps
+ if nofsteps > 1 then
+ reportmoresteps(dataset,sequence)
+ end
+ local snext = getnext(start)
+ if snext then
+ local startchar = getchar(start)
+ local step = steps[1]
+ local kerns = step.coverage[startchar] -- always 1 step
+ if kerns then
+ local prev = start
+ local done = false
+ while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
+ local nextchar = getchar(snext)
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = getnext(snext)
+ elseif not krn then
+ break
+ elseif step.format == "pair" then
+ local a, b = krn[1], krn[2]
+ if a and #a > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a) -- currentlookups flags?
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b)
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ done = true
+ break
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(dataset,sequence),k,gref(getchar(prev)),gref(nextchar))
+ end
+ done = true
+ break
+ end
+ end
+ return head, start, done
+ end
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2base(head,start,stop,dataset,sequence,currentlookup,rlmode)
+ local steps = currentlookup.steps
+ local nofsteps = currentlookup.nofsteps
+ if nofsteps > 1 then
+ reportmoresteps(dataset,sequence)
+ end
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local markanchors = steps[1].coverage[markchar] -- always 1 step
+ if markanchors then
+ local base = getprev(start) -- [glyph] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
+ if marks[basechar] then
+ while true do
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(dataset,sequence),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local ba = markanchors[1][basechar]
+ if ba then
+ local ma = markanchors[2]
+ if ma then
+ local dx, dy, bound = setmark(start,base,factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(dataset,sequence))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(dataset,sequence),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(dataset,sequence),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2ligature(head,start,stop,dataset,sequence,currentlookup,rlmode)
+ local steps = currentlookup.steps
+ local nofsteps = currentlookup.nofsteps
+ if nofsteps > 1 then
+ reportmoresteps(dataset,sequence)
+ end
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local markanchors = steps[1].coverage[markchar] -- always 1 step
+ if markanchors then
+ local base = getprev(start) -- [glyph] [optional marks] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
+ if marks[basechar] then
+ while true do
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(dataset,sequence),markchar)
+ end
+ return head, start, false
+ end
+ end
+ end
+ local ba = markanchors[1][basechar]
+ if ba then
+ local ma = markanchors[2]
+ if ma then
+ local index = getligaindex(start)
+ ba = ba[index]
+ if ba then
+ local dx, dy, bound = setmark(start,base,factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+                              cref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s, prev node is no char",cref(dataset,sequence))
+ end
+ elseif trace_bugs then
+ logwarning("%s, mark %s has no anchors",cref(dataset,sequence),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s, mark %s is no mark",cref(dataset,sequence),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2mark(head,start,stop,dataset,sequence,currentlookup,rlmode)
+ local steps = currentlookup.steps
+ local nofsteps = currentlookup.nofsteps
+ if nofsteps > 1 then
+ reportmoresteps(dataset,sequence)
+ end
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local markanchors = steps[1].coverage[markchar] -- always 1 step
+ if markanchors then
+ local base = getprev(start) -- [glyph] [basemark] [start=mark]
+ local slc = getligaindex(start)
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = getligaindex(base)
+ if blc and blc ~= slc then
+ base = getprev(base)
+ else
+ break
+ end
+ end
+ end
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
+ local basechar = getchar(base)
+ local ba = markanchors[1][basechar]
+ if ba then
+ local ma = markanchors[2]
+ if ma then
+ local dx, dy, bound = setmark(start,base,factor,rlmode,ba,ma,characters[basechar],true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(dataset,sequence))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(dataset,sequence),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(dataset,sequence),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_cursive(head,start,stop,dataset,sequence,currentlookup,rlmode)
+ local steps = currentlookup.steps
+ local nofsteps = currentlookup.nofsteps
+ if nofsteps > 1 then
+ reportmoresteps(dataset,sequence)
+ end
+ local alreadydone = cursonce and getprop(start,a_cursbase) -- also mkmk?
+ if not alreadydone then
+ local startchar = getchar(start)
+ local exitanchors = steps[1].coverage[startchar] -- always 1 step
+ if exitanchors then
+ local done = false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(dataset,sequence),gref(startchar))
+ end
+ else
+ local nxt = getnext(start)
+ while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
+ local nextchar = getchar(nxt)
+ if marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = getnext(nxt)
+ else
+ local exit = exitanchors[3]
+ if exit then
+ local entry = exitanchors[1][nextchar]
+ if entry then
+ entry = entry[2]
+ if entry then
+ local dx, dy, bound = setcursive(start,nxt,factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in %s mode",pref(dataset,sequence),gref(startchar),gref(nextchar),dx,dy,anchor,bound,mref(rlmode))
+ end
+ done = true
+ break
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(dataset,sequence),gref(getchar(start)),alreadydone)
+ end
+ return head, start, false
+ end
+ end
+ return head, start, false
+end
+
+-- what pointer to return, spec says stop
+-- to be discussed ... is bidi changer a space?
+-- elseif char == zwnj and sequence[n][32] then -- brrr
+
+local function show_skip(dataset,sequence,char,ck,class)
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(dataset,sequence),gref(char),class,ck[1],ck[8])
+end
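+
+-- For readability: the fields of a contextual rule record 'ck' as used in
+-- handle_contextchain below (derived from the indexing; ck[2] is not used here):
+--
+--   ck[1] : rule number (tracing)
+--   ck[3] : the sequence of coverage sets to match
+--   ck[4] : first slot of the current (input) part
+--   ck[5] : last slot of the current (input) part
+--   ck[6] : the chained lookups to apply on a match
+--   ck[7] : replacements (reversesub)
+--   ck[8] : lookuptype (tracing)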
+
+local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
+ local flags = sequence.flags
+ local done = false
+ local skipmark = flags[1]
+ local skipligature = flags[2]
+ local skipbase = flags[3]
+ local markclass = sequence.markclass
+ local skipped = false
+ for k=1,#contexts do
+ local match = true
+ local current = start
+ local last = start
+ local ck = contexts[k]
+ local seq = ck[3]
+ local s = #seq
+ -- f..l = mid string
+ if s == 1 then
+ -- never happens
+ match = getid(current) == glyph_code and getfont(current) == currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
+ else
+ -- maybe we need a better space check (maybe check for glue or category or combination)
+ -- we cannot optimize for n=2 because there can be disc nodes
+ local f = ck[4]
+ local l = ck[5]
+ -- current match
+ if f == 1 and f == l then -- current only
+ -- already a hit -- do we need to check for mark?
+ -- match = true
+ else -- before/current/after | before/current | current/after
+ -- no need to test first hit (to be optimized)
+ if f == l then -- new, else last out of sync (f is > 1)
+ -- match = true
+ else
+ local n = f + 1
+ last = getnext(last)
+ while n <= l do
+ if last then
+ local id = getid(last)
+ if id == glyph_code then
+ if getfont(last) == currentfont and getsubtype(last)<256 then
+ local char = getchar(last)
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class or "base"
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(dataset,sequence,char,ck,class)
+ end
+ last = getnext(last)
+ elseif seq[n][char] then
+ if n < l then
+ last = getnext(last)
+ end
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ if check_discretionaries then
+ local replace = getfield(last,"replace")
+ if replace then
+ -- so far we never entered this branch
+ while replace do
+ if seq[n][getchar(replace)] then
+ n = n + 1
+ replace = getnext(replace)
+ if not replace then
+ break
+ elseif n > l then
+ -- match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ end
+ if not match then
+ break
+ elseif check_discretionaries == "trace" then
+ report_chain("check disc action in current")
+ end
+ else
+ last = getnext(last) -- no skipping here
+ end
+ else
+ last = getnext(last) -- no skipping here
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ end
+ end
+ end
+ -- before
+ if match and f > 1 then
+ local prev = getprev(start)
+ if prev then
+ local n = f-1
+ while n >= 1 do
+ if prev then
+ local id = getid(prev)
+ if id == glyph_code then
+ if getfont(prev) == currentfont and getsubtype(prev)<256 then -- normal char
+ local char = getchar(prev)
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class or "base"
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(dataset,sequence,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n -1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ -- the special case: f i where i becomes dottless i ..
+ if check_discretionaries then
+ local replace = getfield(prev,"replace")
+ if replace then
+ -- we seldom enter this branch (e.g. on brill efficient)
+ replace = find_node_tail(replace)
+ local finish = getprev(replace)
+ while replace do
+ if seq[n][getchar(replace)] then
+ n = n - 1
+ replace = getprev(replace)
+ if not replace or replace == finish then
+ break
+ elseif n < 1 then
+ -- match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ end
+ if not match then
+ break
+ elseif check_discretionaries == "trace" then
+ report_chain("check disc action in before")
+ end
+ else
+ -- skip 'm
+ end
+ else
+ -- skip 'm
+ end
+ elseif seq[n][32] then
+ n = n -1
+ else
+ match = false
+ break
+ end
+ prev = getprev(prev)
+ elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
+ n = n - 1
+ else
+ match = false
+ break
+ end
+ end
+ else
+ match = false
+ end
+ end
+ -- after
+ if match and s > l then
+ local current = last and getnext(last)
+ if current then
+ -- removed optimization for s-l == 1, we have to deal with marks anyway
+ local n = l + 1
+ while n <= s do
+ if current then
+ local id = getid(current)
+ if id == glyph_code then
+ if getfont(current) == currentfont and getsubtype(current)<256 then -- normal char
+ local char = getchar(current)
+ local ccd = descriptions[char] -- TODO: we have a marks array !
+ if ccd then
+ local class = ccd.class or "base"
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(dataset,sequence,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ if check_discretionaries then
+ local replace = getfield(current,"replace")
+ if replace then
+ -- so far we never entered this branch
+ while replace do
+ if seq[n][getchar(replace)] then
+ n = n + 1
+ replace = getnext(replace)
+ if not replace then
+ break
+ elseif n > s then
+ break
+ end
+ else
+ match = false
+ break
+ end
+ end
+ if not match then
+ break
+ elseif check_discretionaries == "trace" then
+ report_chain("check disc action in after")
+ end
+ else
+ -- skip 'm
+ end
+ else
+ -- skip 'm
+ end
+ elseif seq[n][32] then -- brrr
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ current = getnext(current)
+ elseif seq[n][32] then
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ end
+ else
+ match = false
+ end
+ end
+ end
+ if match then
+ -- can lookups be of a different type ?
+ if trace_contexts then
+ local rule = ck[1]
+ local lookuptype = ck[8]
+ local first = ck[4]
+ local last = ck[5]
+ local char = getchar(start)
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(dataset,sequence),rule,gref(char),first-1,last-first+1,s-last,lookuptype)
+ end
+ local chainlookups = ck[6]
+ if chainlookups then
+ local nofchainlookups = #chainlookups
+ -- we can speed this up if needed
+ if nofchainlookups == 1 then
+ local chainlookup = chainlookups[1]
+ local chainkind = chainlookup.type
+ local chainproc = chainprocs[chainkind]
+ if chainproc then
+ local ok
+ head, start, ok = chainproc(head,start,last,dataset,sequence,chainlookup,rlmode,1)
+ if ok then
+ done = true
+ end
+ else
+ logprocess("%s: %s is not yet supported",cref(dataset,sequence),chainkind)
+ end
+ else
+ local i = 1
+ while start and true do
+ if skipped then
+ while true do -- todo: use properties
+ local char = getchar(start)
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class or "base"
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ start = getnext(start)
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
+                        -- see remark in the MS standard under: LookupType 5: Contextual Substitution Subtable
+ local chainlookup = chainlookups[1] -- should be i when they can be different
+ if not chainlookup then
+ -- we just advance
+ i = i + 1
+ else
+ local chainkind = chainlookup.type
+ local chainproc = chainprocs[chainkind]
+ if chainproc then
+ local ok, n
+ head, start, ok, n = chainproc(head,start,last,dataset,sequence,chainlookup,rlmode,i)
+ -- messy since last can be changed !
+ if ok then
+ done = true
+ if n and n > 1 then
+ -- we have a ligature (cf the spec we advance one but we really need to test it
+ -- as there are fonts out there that are fuzzy and have too many lookups:
+ --
+ -- U+1105 U+119E U+1105 U+119E : sourcehansansklight: script=hang ccmp=yes
+ --
+ if i + n > nofchainlookups then
+ -- if trace_contexts then
+ -- logprocess("%s: quitting lookups",cref(dataset,sequence))
+ -- end
+ break
+ else
+                                    -- we need to carry on
+ end
+ end
+ end
+ else
+ -- actually an error
+ logprocess("%s: %s is not yet supported",cref(dataset,sequence),chainkind)
+ end
+ i = i + 1
+ end
+ if i > nofchainlookups or not start then
+ break
+ elseif start then
+ start = getnext(start)
+ end
+ end
+ end
+ else
+ local replacements = ck[7]
+ if replacements then
+ head, start, done = chainprocs.reversesub(head,start,last,dataset,sequence,replacements,rlmode)
+ else
+ done = quit_on_no_replacement -- can be meant to be skipped / quite inconsistent in fonts
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(dataset,sequence))
+ end
+ end
+ end
+ end
+ end
+ return head, start, done
+end
+
+handlers.gsub_context = handle_contextchain
+handlers.gsub_contextchain = handle_contextchain
+handlers.gsub_reversecontextchain = handle_contextchain
+handlers.gpos_contextchain = handle_contextchain
+handlers.gpos_context = handle_contextchain
+
+local missing = { } -- we only report once
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
+end
+
+local logwarning = report_process
+
+local function report_missing_cache(dataset,sequence)
+ local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
+    local t = f[sequence.type] if not t then t = { } f[sequence.type] = t end
+    if not t[sequence] then
+        t[sequence] = true
+ logwarning("missing cache for feature %a, lookup %a, type %a, font %a, name %a",
+ dataset[4],sequence.name,sequence.type,currentfont,tfmdata.properties.fullname)
+ end
+end
+
+local resolved = { } -- we only resolve a font,script,language pair once
+
+-- todo: pass all these 'locals' in a table
+
+local sequencelists = setmetatableindex(function(t,font)
+ local sequences = fontdata[font].resources.sequences
+ if not sequences or not next(sequences) then
+ sequences = false
+ end
+ t[font] = sequences
+ return sequences
+end)
+
+-- fonts.hashes.sequences = sequencelists
+
+local autofeatures = fonts.analyzers.features -- was: constants
+
+local function initialize(sequence,script,language,enabled)
+ local features = sequence.features
+ if features then
+ local order = sequence.order
+ if order then
+ for i=1,#order do --
+ local kind = order[i] --
+ local valid = enabled[kind]
+ if valid then
+ local scripts = features[kind] --
+ local languages = scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return {
+ valid,
+ autofeatures[kind] or false,
+ sequence.chain or 0,
+ kind,
+ sequence,
+ }
+ end
+ end
+ end
+ else
+ -- can't happen
+ end
+ end
+ return false
+end
+
+function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
+ local shared = tfmdata.shared
+ local properties = tfmdata.properties
+ local language = properties.language or "dflt"
+ local script = properties.script or "dflt"
+ local enabled = shared.features
+ local res = resolved[font]
+ if not res then
+ res = { }
+ resolved[font] = res
+ end
+ local rs = res[script]
+ if not rs then
+ rs = { }
+ res[script] = rs
+ end
+ local rl = rs[language]
+ if not rl then
+ rl = {
+ -- indexed but we can also add specific data by key
+ }
+ rs[language] = rl
+ local sequences = tfmdata.resources.sequences
+ for s=1,#sequences do
+ local v = enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1] = v
+ end
+ end
+ end
+ return rl
+end
+
+-- assumptions:
+--
+-- * there are languages (and fonts) that use complex disc nodes
+
+-- optimization comes later ...
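+
+-- For reference, the standard luatex anatomy of a discretionary node (the splicing
+-- strategy below is ours):
+--
+--   pre     : typeset before the break when the word is hyphenated
+--   post    : typeset after the break
+--   replace : typeset when no break is taken
+--
+-- kernrun, comprun, testrun and discrun temporarily link these sublists to the
+-- surrounding glyphs (or process them separately) and restore the pointers afterwards.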
+
+local function kernrun(disc,run) -- we can assume that prev and next are glyphs
+ if trace_kernruns then
+ report_run("kern") -- will be more detailed
+ end
+ --
+ local prev = getprev(disc) -- todo, keep these in the main loop
+ local next = getnext(disc) -- todo, keep these in the main loop
+ --
+ local pre = getfield(disc,"pre")
+ if not pre then
+ -- go on
+ elseif prev then
+ local nest = getprev(pre)
+ setfield(pre,"prev",prev)
+ setfield(prev,"next",pre)
+ run(prev,"preinjections")
+ setfield(pre,"prev",nest)
+ setfield(prev,"next",disc)
+ else
+ run(pre,"preinjections")
+ end
+ --
+ local post = getfield(disc,"post")
+ if not post then
+ -- go on
+ elseif next then
+ local tail = find_node_tail(post)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ run(post,"postinjections",tail)
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ else
+ run(post,"postinjections")
+ end
+ --
+ local replace = getfield(disc,"replace")
+ if not replace then
+ -- this should be already done by discfound
+ elseif prev and next then
+ local tail = find_node_tail(replace)
+ local nest = getprev(replace)
+ setfield(replace,"prev",prev)
+ setfield(prev,"next",replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ run(prev,"replaceinjections",tail)
+ setfield(replace,"prev",nest)
+ setfield(prev,"next",disc)
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ elseif prev then
+ local nest = getprev(replace)
+ setfield(replace,"prev",prev)
+ setfield(prev,"next",replace)
+ run(prev,"replaceinjections")
+ setfield(replace,"prev",nest)
+ setfield(prev,"next",disc)
+ elseif next then
+ local tail = find_node_tail(replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ run(replace,"replaceinjections",tail)
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ else
+ run(replace,"replaceinjections")
+ end
+end
+
+-- the if new test might be dangerous as luatex will check / set some tail stuff
+-- in a temp node
+
+local function comprun(disc,run)
+ if trace_compruns then
+ report_run("comp: %s",languages.serializediscretionary(disc))
+ end
+ --
+ local pre = getfield(disc,"pre")
+ if pre then
+ local new, done = run(pre)
+ if done then
+ setfield(disc,"pre",new)
+ end
+ end
+ --
+ local post = getfield(disc,"post")
+ if post then
+ local new, done = run(post)
+ if done then
+ setfield(disc,"post",new)
+ end
+ end
+ --
+ local replace = getfield(disc,"replace")
+ if replace then
+ local new, done = run(replace)
+ if done then
+ setfield(disc,"replace",new)
+ end
+ end
+end
+
+local function testrun(disc,trun,crun)
+ local next = getnext(disc)
+ if next then
+ local replace = getfield(disc,"replace")
+ if replace then
+ local prev = getprev(disc)
+ if prev then
+ -- only look ahead
+ local tail = find_node_tail(replace)
+ -- local nest = getprev(replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ if trun(replace,next) then
+ setfield(disc,"replace",nil) -- beware, side effects of nest so first
+ setfield(prev,"next",replace)
+ setfield(replace,"prev",prev)
+ setfield(next,"prev",tail)
+ setfield(tail,"next",next)
+ setfield(disc,"prev",nil)
+ setfield(disc,"next",nil)
+ flush_node_list(disc)
+ return replace -- restart
+ else
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ end
+ else
+ -- weird case
+ end
+ else
+ -- no need
+ end
+ else
+ -- weird case
+ end
+ comprun(disc,crun)
+ return next
+end
+
+local function discrun(disc,drun,krun)
+ local next = getnext(disc)
+ local prev = getprev(disc)
+ if trace_discruns then
+ report_run("disc") -- will be more detailed
+ end
+ if next and prev then
+ setfield(prev,"next",next)
+ -- setfield(next,"prev",prev)
+ drun(prev)
+ setfield(prev,"next",disc)
+ -- setfield(next,"prev",disc)
+ end
+ --
+ local pre = getfield(disc,"pre")
+ if not pre then
+ -- go on
+ elseif prev then
+ local nest = getprev(pre)
+ setfield(pre,"prev",prev)
+ setfield(prev,"next",pre)
+ krun(prev,"preinjections")
+ setfield(pre,"prev",nest)
+ setfield(prev,"next",disc)
+ else
+ krun(pre,"preinjections")
+ end
+ return next
+end
+
+-- todo: maybe run lr and rl stretches
+
+local nesting = 0
+
+local function featuresprocessor(head,font,attr)
+
+ local sequences = sequencelists[font] -- temp hack
+
+    if not sequences then
+ return head, false
+ end
+
+ nesting = nesting + 1
+
+ if nesting == 1 then
+
+ currentfont = font
+ tfmdata = fontdata[font]
+ descriptions = tfmdata.descriptions
+ characters = tfmdata.characters
+ marks = tfmdata.resources.marks
+ factor = tfmdata.parameters.factor
+
+ elseif currentfont ~= font then
+
+ report_warning("nested call with a different font, level %s, quitting",nesting)
+ nesting = nesting - 1
+ return head, false
+
+ end
+
+ head = tonut(head)
+
+ if trace_steps then
+ checkstep(head)
+ end
+
+ local rlmode = 0
+ local done = false
+ local datasets = otf.dataset(tfmdata,font,attr)
+
+ local dirstack = { } -- could move outside function
+
+ -- We could work on sub start-stop ranges instead but I wonder if there is that
+    -- much speed gain (experiments showed that it did not make much sense) and we need
+ -- to keep track of directions anyway. Also at some point I want to play with
+ -- font interactions and then we do need the full sweeps.
+
+ -- Keeping track of the headnode is needed for devanagari (I generalized it a bit
+ -- so that multiple cases are also covered.)
+
+    -- We don't go to the next node when a disc node is created, so that we can then
+    -- treat the pre, post and replace parts. It's a bit of a hack but works out ok for
+    -- most cases.
+
+    -- there can be fewer subtype and attr checks in the comprun etc helpers
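+
+    -- A short dispatch summary for discretionary nodes, derived from the disc_code
+    -- branches below:
+    --
+    --   gpos_single / gpos_pair : kernrun for real discretionaries, discrun otherwise
+    --   gsub_ligature           : testrun (the disc may be replaced altogether)
+    --   other gsub features     : comprun on real discretionaries, otherwise skipped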
+
+ for s=1,#datasets do
+ local dataset = datasets[s]
+ ----- featurevalue = dataset[1] -- todo: pass to function instead of using a global
+ local attribute = dataset[2]
+ local chain = dataset[3] -- sequence.chain or 0
+ ----- kind = dataset[4]
+ local sequence = dataset[5] -- sequences[s] -- also dataset[5]
+ local rlparmode = 0
+ local topstack = 0
+ local success = false
+ local typ = sequence.type
+ local gpossing = typ == "gpos_single" or typ == "gpos_pair"
+ local handler = handlers[typ]
+ local steps = sequence.steps
+ local nofsteps = sequence.nofsteps
+ if chain < 0 then
+ -- this is a limited case, no special treatments like 'init' etc
+ -- we need to get rid of this slide! probably no longer needed in latest luatex
+ local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = a == attr
+ else
+ a = true
+ end
+ if a then
+ local char = getchar(start)
+ for i=1,nofsteps do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- todo: disc?
+ head, start, success = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(dataset,sequence)
+ end
+ end
+ if start then start = getprev(start) end
+ else
+ start = getprev(start)
+ end
+ else
+ start = getprev(start)
+ end
+ else
+ start = getprev(start)
+ end
+ end
+ else
+ local start = head -- local ?
+ rlmode = 0 -- to be checked ?
+ if nofsteps == 1 then -- happens often
+ local step = steps[1]
+ local lookupcache = step.coverage
+ if not lookupcache then -- also check for empty cache
+ report_missing_cache(dataset,sequence)
+ else
+
+ local function c_run(start) -- no need to check for 256 and attr probably also the same
+ local head = start
+ local done = false
+ while start do
+ local id = getid(start)
+ if id ~= glyph_code then
+ -- very unlikely
+ start = getnext(start)
+ elseif getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+                                -- sequence can go
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
+ if ok then
+ done = true
+ end
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ return head, false
+ end
+ end
+ if done then
+ success = true -- needed in this subrun?
+ end
+ return head, done
+ end
+
+ local function t_run(start,stop)
+ while start ~= stop do
+ local id = getid(start)
+ if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then -- hm, hyphens can match (tlig) so we need to really check
+ -- if we need more than ligatures we can outline the code and use functions
+ local s = getnext(start)
+ local l = nil
+ while s do
+ local lg = lookupmatch[getchar(s)]
+ if lg then
+ l = lg
+ s = getnext(s)
+ else
+ break
+ end
+ end
+ if l and l.ligature then
+ return true
+ end
+ end
+ end
+ start = getnext(start)
+ else
+ break
+ end
+ end
+ end
+
+ local function d_run(prev) -- we can assume that prev and next are glyphs
+ local a = getattr(prev,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
+ else
+ a = not attribute or getprop(prev,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(prev)]
+ if lookupmatch then
+                            -- sequence can go
+                            local h, d, ok = handler(head,prev,dataset,sequence,lookupmatch,rlmode,step,1)
+ if ok then
+ done = true
+ success = true
+ end
+ end
+ end
+ end
+
+ local function k_run(sub,injection,last)
+ local a = getattr(sub,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(sub,a_state) == attribute)
+ else
+ a = not attribute or getprop(sub,a_state) == attribute
+ end
+ if a then
+                        -- sequence can go
+ for n in traverse_nodes(sub) do -- only gpos
+ if n == last then
+ break
+ end
+ local id = getid(n)
+ if id == glyph_code then
+ local lookupmatch = lookupcache[getchar(n)]
+ if lookupmatch then
+ local h, d, ok = handler(sub,n,dataset,sequence,lookupmatch,rlmode,step,1,injection)
+ if ok then
+ done = true
+ success = true
+ end
+ end
+ else
+ -- message
+ end
+ end
+ end
+ end
+
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if getfont(start) == font and getsubtype(start) < 256 then -- why a 256 test ...
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local char = getchar(start)
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+                                    -- sequence can go
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
+ if ok then
+ success = true
+ elseif gpossing and zwnjruns and char == zwnj then
+ discrun(start,d_run)
+ end
+ elseif gpossing and zwnjruns and char == zwnj then
+ discrun(start,d_run)
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ elseif id == disc_code then
+ local discretionary = getsubtype(start) == discretionary_code
+ if gpossing then
+ if discretionary then
+ kernrun(start,k_run)
+ else
+ discrun(start,d_run,k_run)
+ end
+ start = getnext(start)
+ elseif discretionary then
+ if typ == "gsub_ligature" then
+ start = testrun(start,t_run,c_run)
+ else
+ comprun(start,c_run)
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ elseif id == whatsit_code then -- will be function
+ local subtype = getsubtype(start)
+ if subtype == dir_code then
+ local dir = getfield(start,"dir")
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,mref(rlparmode),mref(rlmode),topstack,mref(newdir))
+ end
+ elseif subtype == localpar_code then
+ local dir = getfield(start,"dir")
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ -- one might wonder if the par dir should be looked at, so we might as well drop the next line
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,mref(rlparmode),mref(rlmode))
+ end
+ end
+ start = getnext(start)
+ elseif id == math_code then
+ start = getnext(end_of_math(start))
+ else
+ start = getnext(start)
+ end
+ end
+ end
+
+ else
+
+ local function c_run(start)
+ local head = start
+ local done = false
+ while start do
+ local id = getid(start)
+ if id ~= glyph_code then
+ -- very unlikely
+ start = getnext(start)
+ elseif getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local char = getchar(start)
+                            -- local lookupcache = step.coverage -- not needed here: fetched per step in the loop below
+ for i=1,nofsteps do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
+ if ok then
+ done = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_cache(dataset,sequence)
+ end
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ return head, false
+ end
+ end
+ if done then
+ success = true
+ end
+ return head, done
+ end
+
+ local function d_run(prev)
+ local a = getattr(prev,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
+ else
+ a = not attribute or getprop(prev,a_state) == attribute
+ end
+ if a then
+ -- brr prev can be disc
+ local char = getchar(prev)
+ for i=1,nofsteps do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local h, d, ok = handler(head,prev,dataset,sequence,lookupmatch,rlmode,step,i)
+ if ok then
+ done = true
+ break
+ end
+ end
+ else
+ report_missing_cache(dataset,sequence)
+ end
+ end
+ end
+ end
+
+ local function k_run(sub,injection,last)
+ local a = getattr(sub,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(sub,a_state) == attribute)
+ else
+ a = not attribute or getprop(sub,a_state) == attribute
+ end
+ if a then
+ for n in traverse_nodes(sub) do -- only gpos
+ if n == last then
+ break
+ end
+ local id = getid(n)
+ if id == glyph_code then
+ local char = getchar(n)
+ for i=1,nofsteps do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+                                        local h, d, ok = handler(head,n,dataset,sequence,lookupmatch,rlmode,step,i,injection)
+ if ok then
+ done = true
+ break
+ end
+ end
+ else
+ report_missing_cache(dataset,sequence)
+ end
+ end
+ else
+ -- message
+ end
+ end
+ end
+ end
+
+ local function t_run(start,stop)
+ while start ~= stop do
+ local id = getid(start)
+ if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local char = getchar(start)
+ for i=1,nofsteps do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- if we need more than ligatures we can outline the code and use functions
+ local s = getnext(start)
+ local l = nil
+ while s do
+ local lg = lookupmatch[getchar(s)]
+ if lg then
+ l = lg
+ s = getnext(s)
+ else
+ break
+ end
+ end
+ if l and l.ligature then
+ return true
+ end
+ end
+ else
+ report_missing_cache(dataset,sequence)
+ end
+ end
+ end
+ start = getnext(start)
+ else
+ break
+ end
+ end
+ end
+
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ for i=1,nofsteps do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ if lookupcache then
+ local char = getchar(start)
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
+ if ok then
+ success = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ elseif gpossing and zwnjruns and char == zwnj then
+ discrun(start,d_run)
+ end
+ elseif gpossing and zwnjruns and char == zwnj then
+ discrun(start,d_run)
+ end
+ else
+ report_missing_cache(dataset,sequence)
+ end
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ elseif id == disc_code then
+ local discretionary = getsubtype(start) == discretionary_code
+ if gpossing then
+ if discretionary then
+ kernrun(start,k_run)
+ else
+ discrun(start,d_run,k_run)
+ end
+ start = getnext(start)
+ elseif discretionary then
+ if typ == "gsub_ligature" then
+ start = testrun(start,t_run,c_run)
+ else
+ comprun(start,c_run)
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ elseif id == whatsit_code then
+ local subtype = getsubtype(start)
+ if subtype == dir_code then
+ local dir = getfield(start,"dir")
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,mref(rlparmode),mref(rlmode),topstack,mref(newdir))
+ end
+ elseif subtype == localpar_code then
+ local dir = getfield(start,"dir")
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,mref(rlparmode),mref(rlmode))
+ end
+ end
+ start = getnext(start)
+ elseif id == math_code then
+ start = getnext(end_of_math(start))
+ else
+ start = getnext(start)
+ end
+ end
+ end
+ end
+ if success then
+ done = true
+ end
+ if trace_steps then -- ?
+ registerstep(head)
+ end
+
+ end
+
+ nesting = nesting - 1
+ head = tonode(head)
+
+ return head, done
+end
+
+-- so far
+
+local function featuresinitializer(tfmdata,value)
+ -- nothing done here any more
+end
+
+registerotffeature {
+ name = "features",
+ description = "features",
+ default = true,
+ initializers = {
+ position = 1,
+ node = featuresinitializer,
+ },
+ processors = {
+ node = featuresprocessor,
+ }
+}
+
+-- This can be used for extra handlers, but should be used with care!
+
+otf.handlers = handlers -- used in devanagari
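+
+-- A hypothetical sketch, not part of the distribution: an extra module could
+-- hook in roughly like this (the name 'gsub_custom' and the body are made up;
+-- only the argument order mirrors the
+-- handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i) call in the
+-- main loop above, and the return values are head, start, done):
+--
+-- local handlers = otf.handlers
+--
+-- function handlers.gsub_custom(head,start,dataset,sequence,replacement,rlmode,step,i)
+--     -- inspect or replace the glyph node at 'start' here
+--     return head, start, false
+-- end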
diff --git a/tex/context/base/font-oup.lua b/tex/context/base/font-oup.lua
new file mode 100644
index 000000000..a1c225ee9
--- /dev/null
+++ b/tex/context/base/font-oup.lua
@@ -0,0 +1,1921 @@
+if not modules then modules = { } end modules ['font-oup'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local next, type = next, type
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+local lpegmatch = lpeg.match
+local insert, remove = table.insert, table.remove
+
+local formatters = string.formatters
+local sortedkeys = table.sortedkeys
+local sortedhash = table.sortedhash
+local tohash = table.tohash
+
+local report = logs.reporter("otf reader")
+
+local trace_markwidth = false trackers.register("otf.markwidth",function(v) trace_markwidth = v end)
+
+local readers = fonts.handlers.otf.readers
+local privateoffset = fonts.constructors and fonts.constructors.privateoffset or 0xF0000 -- 0x10FFFF
+
+local f_private = formatters["P%05X"]
+local f_unicode = formatters["U%05X"]
+local f_index = formatters["I%05X"]
+
+local function replaced(list,index,replacement)
+ if type(list) == "number" then
+ return replacement
+ elseif type(replacement) == "table" then
+ local t = { }
+ local n = index-1
+ for i=1,n do
+ t[i] = list[i]
+ end
+ for i=1,#replacement do
+ n = n + 1
+ t[n] = replacement[i]
+ end
+ for i=index+1,#list do
+ n = n + 1
+ t[n] = list[i]
+        end
+        return t
+ else
+ list[index] = replacement
+ return list
+ end
+end
+
+local function unifyresources(fontdata,indices)
+ local descriptions = fontdata.descriptions
+ local resources = fontdata.resources
+ if not descriptions or not resources then
+ return
+ end
+ --
+ local variants = fontdata.variants
+ if variants then
+ for selector, unicodes in next, variants do
+ for unicode, index in next, unicodes do
+ unicodes[unicode] = indices[index]
+ end
+ end
+ end
+ --
+ local function remark(marks)
+ if marks then
+ local newmarks = { }
+ for k, v in next, marks do
+ local u = indices[k]
+ if u then
+ newmarks[u] = v
+ else
+ report("discarding mark %i",k)
+ end
+ end
+ return newmarks
+ end
+ end
+ --
+ local marks = resources.marks
+ if marks then
+ resources.marks = remark(marks)
+ end
+ --
+ local markclasses = resources.markclasses
+ if markclasses then
+ for class, marks in next, markclasses do
+ markclasses[class] = remark(marks)
+ end
+ end
+ --
+ local marksets = resources.marksets
+ if marksets then
+ for class, marks in next, marksets do
+ marksets[class] = remark(marks)
+ end
+ end
+ --
+ local done = { } -- we need to deal with shared !
+ --
+ local function recover(cover) -- can be packed
+ for i=1,#cover do
+ local c = cover[i]
+ if not done[c] then
+ local t = { }
+ for k, v in next, c do
+ t[indices[k]] = v
+ end
+ cover[i] = t
+                done[c] = t
+ end
+ end
+ end
+ --
+ local function recursed(c) -- ligs are not packed
+ local t = { }
+ for g, d in next, c do
+ if type(d) == "table" then
+ t[indices[g]] = recursed(d)
+ else
+ t[g] = indices[d] -- ligature
+ end
+ end
+ return t
+ end
+ --
+ local function unifythem(sequences)
+ if not sequences then
+ return
+ end
+ for i=1,#sequences do
+ local sequence = sequences[i]
+ local kind = sequence.type
+ local steps = sequence.steps
+ local features = sequence.features
+ if steps then
+ for i=1,#steps do
+ local step = steps[i]
+ if kind == "gsub_single" then
+ local c = step.coverage
+ if c then
+ local t1 = done[c]
+ if not t1 then
+ t1 = { }
+ for g1, d1 in next, c do
+ t1[indices[g1]] = indices[d1]
+ end
+ done[c] = t1
+ end
+ step.coverage = t1
+ end
+ elseif kind == "gpos_pair" then
+ local c = step.coverage
+ if c then
+ local t1 = done[c]
+ if not t1 then
+ t1 = { }
+ for g1, d1 in next, c do
+ local t2 = done[d1]
+ if not t2 then
+ t2 = { }
+ for g2, d2 in next, d1 do
+ t2[indices[g2]] = d2
+ end
+ done[d1] = t2
+ end
+ t1[indices[g1]] = t2
+ end
+ done[c] = t1
+ end
+ step.coverage = t1
+ end
+ elseif kind == "gsub_ligature" then
+ local c = step.coverage
+ if c then
+ step.coverage = recursed(c)
+ end
+ elseif kind == "gsub_alternate" or kind == "gsub_multiple" then
+ local c = step.coverage
+ if c then
+ local t1 = done[c]
+ if not t1 then
+ t1 = { }
+ for g1, d1 in next, c do
+ for i=1,#d1 do
+ d1[i] = indices[d1[i]]
+ end
+ t1[indices[g1]] = d1
+ end
+ done[c] = t1
+ end
+ step.coverage = t1
+ end
+ elseif kind == "gpos_mark2base" or kind == "gpos_mark2mark" or kind == "gpos_mark2ligature" then
+ local c = step.coverage
+ if c then
+ local t1 = done[c]
+ if not t1 then
+ t1 = { }
+ for g1, d1 in next, c do
+ t1[indices[g1]] = d1
+ end
+ done[c] = t1
+ end
+ step.coverage = t1
+ end
+ local c = step.baseclasses
+ if c then
+ local t1 = done[c]
+ if not t1 then
+ for g1, d1 in next, c do
+ local t2 = done[d1]
+ if not t2 then
+ t2 = { }
+ for g2, d2 in next, d1 do
+ t2[indices[g2]] = d2
+ end
+ done[d1] = t2
+ end
+ c[g1] = t2
+ end
+ done[c] = c
+ end
+ end
+ elseif kind == "gpos_single" or kind == "gpos_cursive" then
+ local c = step.coverage
+ if c then
+ local t1 = done[c]
+ if not t1 then
+ t1 = { }
+ for g1, d1 in next, c do
+ t1[indices[g1]] = d1
+ end
+ done[c] = t1
+ end
+ step.coverage = t1
+ end
+ end
+ --
+ local rules = step.rules
+ if rules then
+ for i=1,#rules do
+ local rule = rules[i]
+ --
+ local before = rule.before if before then recover(before) end
+ local after = rule.after if after then recover(after) end
+ local current = rule.current if current then recover(current) end
+ --
+ local replacements = rule.replacements
+ if replacements then
+ if not done[replacements] then
+ local r = { }
+ for k, v in next, replacements do
+ r[indices[k]] = indices[v]
+ end
+ rule.replacements = r
+ done[replacements] = r
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ --
+ unifythem(resources.sequences)
+ unifythem(resources.sublookups)
+end
+
+local ignore = { -- should we fix them?
+ ["notdef"] = true,
+ [".notdef"] = true,
+ ["null"] = true,
+ [".null"] = true,
+ ["nonmarkingreturn"] = true,
+}
+
+
+local function checklookups(fontdata,missing,nofmissing)
+ local descriptions = fontdata.descriptions
+ local resources = fontdata.resources
+ if missing and nofmissing and nofmissing <= 0 then
+ return
+ end
+ --
+ local singles = { }
+ local alternates = { }
+ local ligatures = { }
+
+ if not missing then
+ missing = { }
+ nofmissing = 0
+ for u, d in next, descriptions do
+ if not d.unicode then
+ nofmissing = nofmissing + 1
+ missing[u] = true
+ end
+ end
+ end
+
+ local function collectthem(sequences)
+ if not sequences then
+ return
+ end
+ for i=1,#sequences do
+ local sequence = sequences[i]
+ local kind = sequence.type
+ local steps = sequence.steps
+ if steps then
+ for i=1,#steps do
+ local step = steps[i]
+ if kind == "gsub_single" then
+ local c = step.coverage
+ if c then
+ singles[#singles+1] = c
+ end
+ elseif kind == "gsub_alternate" then
+ local c = step.coverage
+ if c then
+ alternates[#alternates+1] = c
+ end
+ elseif kind == "gsub_ligature" then
+ local c = step.coverage
+ if c then
+ ligatures[#ligatures+1] = c
+ end
+ end
+ end
+ end
+ end
+ end
+
+ collectthem(resources.sequences)
+ collectthem(resources.sublookups)
+
+ local loops = 0
+ while true do
+ loops = loops + 1
+ local old = nofmissing
+ for i=1,#singles do
+ local c = singles[i]
+ for g1, g2 in next, c do
+ if missing[g1] then
+ local u2 = descriptions[g2].unicode
+ if u2 then
+ missing[g1] = false
+ descriptions[g1].unicode = u2
+ nofmissing = nofmissing - 1
+ end
+ end
+ if missing[g2] then
+ local u1 = descriptions[g1].unicode
+ if u1 then
+ missing[g2] = false
+ descriptions[g2].unicode = u1
+ nofmissing = nofmissing - 1
+ end
+ end
+ end
+ end
+ for i=1,#alternates do
+ local c = alternates[i]
+ -- maybe first a g1 loop and then a g2
+ for g1, d1 in next, c do
+ if missing[g1] then
+ for i=1,#d1 do
+ local g2 = d1[i]
+ local u2 = descriptions[g2].unicode
+ if u2 then
+ missing[g1] = false
+ descriptions[g1].unicode = u2
+ nofmissing = nofmissing - 1
+ end
+ end
+ end
+ if not missing[g1] then
+ for i=1,#d1 do
+ local g2 = d1[i]
+ if missing[g2] then
+ local u1 = descriptions[g1].unicode
+ if u1 then
+ missing[g2] = false
+ descriptions[g2].unicode = u1
+ nofmissing = nofmissing - 1
+ end
+ end
+ end
+ end
+ end
+ end
+ if nofmissing <= 0 then
+ report("all done in %s loops",loops)
+ return
+ elseif old == nofmissing then
+ break
+ end
+ end
+
+ local t, n -- no need to insert/remove and allocate many times
+
+ local function recursed(c)
+ for g, d in next, c do
+ if g ~= "ligature" then
+ local u = descriptions[g].unicode
+ if u then
+ n = n + 1
+ t[n] = u
+ recursed(d)
+ n = n - 1
+ end
+ elseif missing[d] then
+ local l = { }
+ local m = 0
+ for i=1,n do
+ local u = t[i]
+ if type(u) == "table" then
+ for i=1,#u do
+ m = m + 1
+ l[m] = u[i]
+ end
+ else
+ m = m + 1
+ l[m] = u
+ end
+ end
+ missing[d] = false
+ descriptions[d].unicode = l
+ nofmissing = nofmissing - 1
+ end
+ end
+ end
+
+ if nofmissing > 0 then
+ t = { }
+ n = 0
+ local loops = 0
+ while true do
+ loops = loops + 1
+ local old = nofmissing
+ for i=1,#ligatures do
+ recursed(ligatures[i])
+ end
+ if nofmissing <= 0 then
+ report("all done in %s loops",loops)
+ return
+ elseif old == nofmissing then
+ break
+ end
+ end
+ t = nil
+ n = 0
+ end
+
+ if nofmissing > 0 then
+ local done = { }
+ for i, r in next, missing do
+ if r then
+ local name = descriptions[i].name or f_index(i)
+ if not ignore[name] then
+ done[#done+1] = name
+ end
+ end
+ end
+ if #done > 0 then
+ table.sort(done)
+ report("not unicoded: % t",done)
+ end
+ end
+end
+
+local function unifymissing(fontdata)
+ if not fonts.mappings then
+ require("font-map")
+ require("font-agl")
+ end
+ local unicodes = { }
+ local private = fontdata.private
+ local resources = fontdata.resources
+ resources.unicodes = unicodes
+ for unicode, d in next, fontdata.descriptions do
+ if unicode < privateoffset then
+ local name = d.name
+ if name then
+ unicodes[name] = unicode
+ end
+ end
+ end
+ fonts.mappings.addtounicode(fontdata,fontdata.filename,checklookups)
+ resources.unicodes = nil
+end
+
+local function unifyglyphs(fontdata,usenames)
+ local private = fontdata.private or privateoffset
+ local glyphs = fontdata.glyphs
+ local indices = { }
+ local descriptions = { }
+ local names = usenames and { }
+ local resources = fontdata.resources
+ local zero = glyphs[0]
+ local zerocode = zero.unicode
+ if not zerocode then
+ zerocode = private
+ zero.unicode = zerocode
+ private = private + 1
+ end
+ descriptions[zerocode] = zero
+ if names then
+ local name = glyphs[0].name or f_private(zerocode)
+ indices[0] = name
+ names[name] = zerocode
+ else
+ indices[0] = zerocode
+ end
+ --
+ for index=1,#glyphs do
+ local glyph = glyphs[index]
+ local unicode = glyph.unicode -- this is the primary one
+ if not unicode then
+ -- report("assigning private unicode %U to glyph indexed %05X",index,private)
+ unicode = private
+ -- glyph.unicode = -1
+ if names then
+ local name = glyph.name or f_private(unicode)
+ indices[index] = name
+ names[name] = unicode
+ else
+ indices[index] = unicode
+ end
+ private = private + 1
+ else
+ if names then
+ local name = glyph.name or f_unicode(unicode)
+ indices[index] = name
+ names[name] = unicode
+ else
+ indices[index] = unicode
+ end
+ end
+ descriptions[unicode] = glyph
+ end
+ --
+ for index=1,#glyphs do
+ local math = glyphs[index].math
+ if math then
+ local list = math.vparts
+ if list then
+ for i=1,#list do local l = list[i] l.glyph = indices[l.glyph] end
+ end
+ local list = math.hparts
+ if list then
+ for i=1,#list do local l = list[i] l.glyph = indices[l.glyph] end
+ end
+ local list = math.vvariants
+ if list then
+ -- for i=1,#list do local l = list[i] l.glyph = indices[l.glyph] end
+ for i=1,#list do list[i] = indices[list[i]] end
+ end
+ local list = math.hvariants
+ if list then
+ -- for i=1,#list do local l = list[i] l.glyph = indices[l.glyph] end
+ for i=1,#list do list[i] = indices[list[i]] end
+ end
+ end
+ end
+ --
+ fontdata.private = private
+ fontdata.glyphs = nil
+ fontdata.names = names
+ fontdata.descriptions = descriptions
+ fontdata.hashmethod = hashmethod
+ --
+ return indices, names
+end
+
+local p_bogusname = (
+ (P("uni") + P("UNI") + P("Uni") + P("U") + P("u")) * S("Xx")^0 * R("09","AF")^1
+ + (P("identity") + P("Identity") + P("IDENTITY")) * R("09","AF")^1
+ + (P("index") + P("Index") + P("INDEX")) * R("09")^1
+) * P(-1)
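+
+-- Illustration only (these sample names are invented): the pattern above is
+-- meant to catch generated names that merely echo a code point or index, for
+-- instance "uni0041", "uE01A", "Identity2F" or "index123"; a real name such
+-- as "a.sc" or "fi" does not match and is kept.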
+
+local function stripredundant(fontdata)
+ local descriptions = fontdata.descriptions
+ if descriptions then
+ local n = 0
+ local c = 0
+ for unicode, d in next, descriptions do
+ local name = d.name
+ if name and lpegmatch(p_bogusname,name) then
+ d.name = nil
+ n = n + 1
+ end
+ if d.class == "base" then
+ d.class = nil
+ c = c + 1
+ end
+ end
+ if n > 0 then
+ report("%s bogus names removed (verbose unicode)",n)
+ end
+ if c > 0 then
+ report("%s base class tags removed (default is base)",c)
+ end
+ end
+end
+
+function readers.rehash(fontdata,hashmethod) -- TODO: combine loops in one
+ if not (fontdata and fontdata.glyphs) then
+ return
+ end
+ if hashmethod == "indices" then
+ fontdata.hashmethod = "indices"
+ elseif hashmethod == "names" then
+ fontdata.hashmethod = "names"
+ local indices = unifyglyphs(fontdata,true)
+ unifyresources(fontdata,indices)
+ unifymissing(fontdata)
+ -- stripredundant(fontdata)
+ else
+ fontdata.hashmethod = "unicode"
+ local indices = unifyglyphs(fontdata)
+ unifyresources(fontdata,indices)
+ unifymissing(fontdata)
+ stripredundant(fontdata)
+ end
+end
+
+function readers.checkhash(fontdata)
+ local hashmethod = fontdata.hashmethod
+ if hashmethod == "unicodes" then
+ fontdata.names = nil -- just to be sure
+ elseif hashmethod == "names" and fontdata.names then
+ unifyresources(fontdata,fontdata.names)
+ fontdata.hashmethod = "unicode"
+ fontdata.names = nil -- no need for it
+ else
+ readers.rehash(fontdata,"unicode")
+ end
+end
+
+function readers.addunicodetable(fontdata)
+ local resources = fontdata.resources
+ local unicodes = resources.unicodes
+ if not unicodes then
+ unicodes = { }
+ resources.unicodes = unicodes
+ for u, d in next, fontdata.descriptions do
+ local n = d.name
+ if n then
+ unicodes[n] = u
+ end
+ end
+ end
+end
+
+-- for the moment here:
+
+local concat, sort = table.concat, table.sort
+local next, type, tostring = next, type, tostring
+
+local criterium = 1
+local threshold = 0
+
+local trace_packing = false trackers.register("otf.packing", function(v) trace_packing = v end)
+local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+
+local report_otf = logs.reporter("fonts","otf loading")
+
+local function tabstr_normal(t)
+ local s = { }
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ if type(v) == "table" then
+ s[n] = k .. ">" .. tabstr_normal(v)
+ elseif v == true then
+ s[n] = k .. "+" -- "=true"
+ elseif v then
+ s[n] = k .. "=" .. v
+ else
+ s[n] = k .. "-" -- "=false"
+ end
+ end
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ return s[1]
+ else
+ sort(s) -- costly but needed (occasional wrong hit otherwise)
+ return concat(s,",")
+ end
+end
+
+local function tabstr_flat(t)
+ local s = { }
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ s[n] = k .. "=" .. v
+ end
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ return s[1]
+ else
+ sort(s) -- costly but needed (occasional wrong hit otherwise)
+ return concat(s,",")
+ end
+end
+
+local function tabstr_mixed(t) -- indexed
+ local s = { }
+ local n = #t
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ local k = t[1]
+ if k == true then
+ return "++" -- we need to distinguish from "true"
+ elseif k == false then
+ return "--" -- we need to distinguish from "false"
+ else
+ return tostring(k) -- number or string
+ end
+ else
+ for i=1,n do
+ local k = t[i]
+ if k == true then
+ s[i] = "++" -- we need to distinguish from "true"
+ elseif k == false then
+ s[i] = "--" -- we need to distinguish from "false"
+ else
+ s[i] = k -- number or string
+ end
+ end
+ return concat(s,",")
+ end
+end
+
+local function tabstr_boolean(t)
+ local s = { }
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ if v then
+ s[n] = k .. "+"
+ else
+ s[n] = k .. "-"
+ end
+ end
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ return s[1]
+ else
+ sort(s) -- costly but needed (occasional wrong hit otherwise)
+ return concat(s,",")
+ end
+end
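+
+-- For illustration (hypothetical input): these helpers only serve to turn a
+-- subtable into a stable string key for the sharing hash, e.g.
+--
+--   tabstr_normal  { dx = 10, mark = true }  ->  "dx=10,mark+"
+--   tabstr_boolean { a = true, b = false }   ->  "a+,b-"
+--   tabstr_mixed   { true, 5, "x" }          ->  "++,5,x"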
+
+-- beware: we cannot unpack and repack the same table because then sharing
+-- interferes (we could catch this if needed) .. so for now: save, reload
+-- and repack in such cases (never needed anyway) .. a tricky aspect is that
+-- we then need to sort more thanks to random hashing
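+--
+-- the effect, with made-up data: after packing, identical subtables collapse
+-- into one shared slot of data.tables, e.g.
+--
+--   before : A.boundingbox = { 0, -10, 500, 700 }
+--            B.boundingbox = { 0, -10, 500, 700 }
+--   after  : A.boundingbox = 1   B.boundingbox = 1   data.tables[1] = { 0, -10, 500, 700 }
+--
+-- so unpacking and repacking that tree would make A and B share one table
+-- object instead of two equal ones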
+
+function readers.pack(data)
+
+ if data then
+
+ local h, t, c = { }, { }, { }
+ local hh, tt, cc = { }, { }, { }
+ local nt, ntt = 0, 0
+
+ local function pack_normal(v)
+ local tag = tabstr_normal(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+
+ local function pack_flat(v)
+ local tag = tabstr_flat(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+
+ local function pack_boolean(v)
+ local tag = tabstr_boolean(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+
+ local function pack_indexed(v)
+ local tag = concat(v," ")
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+
+ local function pack_mixed(v)
+ local tag = tabstr_mixed(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+
+ local function pack_final(v)
+ -- v == number
+ if c[v] <= criterium then
+ return t[v]
+ else
+ -- compact hash
+ local hv = hh[v]
+ if hv then
+ return hv
+ else
+ ntt = ntt + 1
+ tt[ntt] = t[v]
+ hh[v] = ntt
+ cc[ntt] = c[v]
+ return ntt
+ end
+ end
+ end
+
+ local function success(stage,pass)
+ if nt == 0 then
+ if trace_loading or trace_packing then
+ report_otf("pack quality: nothing to pack")
+ end
+ return false
+ elseif nt >= threshold then
+ local one, two, rest = 0, 0, 0
+ if pass == 1 then
+ for k,v in next, c do
+ if v == 1 then
+ one = one + 1
+ elseif v == 2 then
+ two = two + 1
+ else
+ rest = rest + 1
+ end
+ end
+ else
+ for k,v in next, cc do
+ if v > 20 then
+ rest = rest + 1
+ elseif v > 10 then
+ two = two + 1
+ else
+ one = one + 1
+ end
+ end
+ data.tables = tt
+ end
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",
+ stage, pass, one+two+rest, one, two, rest, criterium)
+ end
+ return true
+ else
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",
+ stage, pass, nt, threshold)
+ end
+ return false
+ end
+ end
+
+ local function packers(pass)
+ if pass == 1 then
+ return pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed
+ else
+ return pack_final, pack_final, pack_final, pack_final, pack_final
+ end
+ end
+
+ local resources = data.resources
+ local sequences = resources.sequences
+ local sublookups = resources.sublookups
+ local features = resources.features
+
+ local chardata = characters and characters.data
+ local descriptions = data.descriptions or data.glyphs
+
+ if not descriptions then
+ return
+ end
+
+ --
+
+ for pass=1,2 do
+
+ if trace_packing then
+ report_otf("start packing: stage 1, pass %s",pass)
+ end
+
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
+
+ for unicode, description in next, descriptions do
+ local boundingbox = description.boundingbox
+ if boundingbox then
+ description.boundingbox = pack_indexed(boundingbox)
+ end
+ local math = description.math
+ if math then
+ local kerns = math.kerns
+ if kerns then
+ for tag, kern in next, kerns do
+ kerns[tag] = pack_normal(kern)
+ end
+ end
+ end
+ end
+
+ local function packthem(sequences)
+ for i=1,#sequences do
+ local sequence = sequences[i]
+ local kind = sequence.type
+ local steps = sequence.steps
+ local order = sequence.order
+ local features = sequence.features
+ local flags = sequence.flags
+ if steps then
+ for i=1,#steps do
+ local step = steps[i]
+ if kind == "gpos_pair" then
+ local c = step.coverage
+ if c then
+ if step.format == "kern" then
+ for g1, d1 in next, c do
+ c[g1] = pack_normal(d1)
+ end
+ else
+ for g1, d1 in next, c do
+ for g2, d2 in next, d1 do
+ local f = d2[1] if f then d2[1] = pack_indexed(f) end
+ local s = d2[2] if s then d2[2] = pack_indexed(s) end
+ end
+ end
+ end
+ end
+ elseif kind == "gpos_single" then
+ local c = step.coverage
+ if c then
+ if step.format == "kern" then
+ step.coverage = pack_normal(c)
+ else
+ for g1, d1 in next, c do
+ c[g1] = pack_indexed(d1)
+ end
+ end
+ end
+ elseif kind == "gpos_cursive" then
+ local c = step.coverage
+ if c then
+ for g1, d1 in next, c do
+ local f = d1[2] if f then d1[2] = pack_indexed(f) end
+ local s = d1[3] if s then d1[3] = pack_indexed(s) end
+ end
+ end
+ elseif kind == "gpos_mark2base" or kind == "gpos_mark2mark" then
+ local c = step.baseclasses
+ if c then
+ for g1, d1 in next, c do
+ for g2, d2 in next, d1 do
+ d1[g2] = pack_indexed(d2)
+ end
+ end
+ end
+ local c = step.coverage
+ if c then
+ for g1, d1 in next, c do
+ d1[2] = pack_indexed(d1[2])
+ end
+ end
+ elseif kind == "gpos_mark2ligature" then
+ local c = step.baseclasses
+ if c then
+ for g1, d1 in next, c do
+ for g2, d2 in next, d1 do
+ for g3, d3 in next, d2 do
+ d2[g3] = pack_indexed(d3)
+ end
+ end
+ end
+ end
+ local c = step.coverage
+ if c then
+ for g1, d1 in next, c do
+ d1[2] = pack_indexed(d1[2])
+ end
+ end
+ end
+ -- if ... chain ...
+ local rules = step.rules
+ if rules then
+ for i=1,#rules do
+ local rule = rules[i]
+ local r = rule.before if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
+ local r = rule.after if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
+ local r = rule.current if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
+ local r = rule.replacements if r then rule.replacements = pack_flat (r) end -- can have holes
+ end
+ end
+ end
+ end
+ if order then
+ sequence.order = pack_indexed(order)
+ end
+ if features then
+ for script, feature in next, features do
+ features[script] = pack_normal(feature)
+ end
+ end
+ if flags then
+ sequence.flags = pack_normal(flags)
+ end
+ end
+ end
+
+ if sequences then
+ packthem(sequences)
+ end
+
+ if sublookups then
+ packthem(sublookups)
+ end
+
+ if features then
+ for k, list in next, features do
+ for feature, spec in next, list do
+ list[feature] = pack_normal(spec)
+ end
+ end
+ end
+
+ if not success(1,pass) then
+ return
+ end
+
+ end
+
+ if nt > 0 then
+
+ for pass=1,2 do
+
+ if trace_packing then
+ report_otf("start packing: stage 2, pass %s",pass)
+ end
+
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
+
+ for unicode, description in next, descriptions do
+ local math = description.math
+ if math then
+ local kerns = math.kerns
+ if kerns then
+ math.kerns = pack_normal(kerns)
+ end
+ end
+ end
+
+ local function packthem(sequences)
+ for i=1,#sequences do
+ local sequence = sequences[i]
+ local kind = sequence.type
+ local steps = sequence.steps
+ local features = sequence.features
+ if steps then
+ for i=1,#steps do
+ local step = steps[i]
+ if kind == "gpos_pair" then
+ local c = step.coverage
+ if c then
+ if step.format == "kern" then
+ -- todo !
+ else
+ for g1, d1 in next, c do
+ for g2, d2 in next, d1 do
+ d1[g2] = pack_normal(d2)
+ end
+ end
+ end
+ end
+ elseif kind == "gpos_mark2base" or kind == "gpos_mark2mark" or kind == "gpos_mark2ligature" then
+-- local c = step.baseclasses
+-- for k, v in next, c do
+-- c[k] = pack_normal(v)
+-- end
+ end
+ local rules = step.rules
+ if rules then
+ for i=1,#rules do
+ local rule = rules[i]
+ local r = rule.before if r then rule.before = pack_normal(r) end
+ local r = rule.after if r then rule.after = pack_normal(r) end
+ local r = rule.current if r then rule.current = pack_normal(r) end
+ end
+ end
+ end
+ end
+ if features then
+ sequence.features = pack_normal(features)
+ end
+ end
+ end
+ if sequences then
+ packthem(sequences)
+ end
+ if sublookups then
+ packthem(sublookups)
+ end
+ -- features
+ if not success(2,pass) then
+ -- return
+ end
+ end
+
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 3, pass %s",pass)
+ end
+
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
+
+ local function packthem(sequences)
+ for i=1,#sequences do
+ local sequence = sequences[i]
+ local kind = sequence.type
+ local steps = sequence.steps
+ local features = sequence.features
+ if steps then
+ for i=1,#steps do
+ local step = steps[i]
+ if kind == "gpos_pair" then
+ local c = step.coverage
+ if c then
+ if step.format == "kern" then
+ -- todo !
+ else
+ for g1, d1 in next, c do
+ c[g1] = pack_normal(d1)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ if sequences then
+ packthem(sequences)
+ end
+ if sublookups then
+ packthem(sublookups)
+ end
+
+ end
+
+ end
+
+ end
+end
+
+local unpacked_mt = {
+ __index =
+ function(t,k)
+ t[k] = false
+ return k -- next time true
+ end
+}
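+
+-- The metatable above implements a "seen once" check: the first lookup of a
+-- key returns the (truthy) key itself and stores false, so a second lookup of
+-- the same shared table yields false and the expansion is skipped. A minimal
+-- sketch of the idiom (not used as such in the code):
+--
+-- local seen = setmetatable({ },unpacked_mt)
+-- if seen[kerns] then
+--     -- expand kerns here; this branch is taken the first time only
+-- end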
+
+function readers.unpack(data)
+
+ if data then
+ local tables = data.tables
+ if tables then
+ local resources = data.resources
+ local descriptions = data.descriptions or data.glyphs
+ local sequences = resources.sequences
+ local sublookups = resources.sublookups
+ local features = resources.features
+ local unpacked = { }
+ setmetatable(unpacked,unpacked_mt)
+ for unicode, description in next, descriptions do
+ local tv = tables[description.boundingbox]
+ if tv then
+ description.boundingbox = tv
+ end
+ local math = description.math
+ if math then
+ local kerns = math.kerns
+ if kerns then
+ local tm = tables[kerns]
+ if tm then
+ math.kerns = tm
+ kerns = unpacked[tm]
+ end
+ if kerns then
+ for k, kern in next, kerns do
+ local tv = tables[kern]
+ if tv then
+ kerns[k] = tv
+ end
+ end
+ end
+ end
+ end
+ end
+
+ local function unpackthem(sequences)
+ for i=1,#sequences do
+ local sequence = sequences[i]
+ local kind = sequence.type
+ local steps = sequence.steps
+ local order = sequence.order
+ local features = sequence.features
+ local flags = sequence.flags
+ local markclass = sequence.markclass
+ if steps then
+ for i=1,#steps do
+ local step = steps[i]
+ if kind == "gpos_pair" then
+ local c = step.coverage
+ if c then
+ if step.format == "kern" then
+ for g1, d1 in next, c do
+ local tv = tables[d1]
+ if tv then
+ c[g1] = tv
+ end
+ end
+ else
+ for g1, d1 in next, c do
+ local tv = tables[d1]
+ if tv then
+ c[g1] = tv
+ d1 = tv
+ end
+ for g2, d2 in next, d1 do
+ local tv = tables[d2]
+ if tv then
+ d1[g2] = tv
+ d2 = tv
+ end
+ local f = tables[d2[1]] if f then d2[1] = f end
+ local s = tables[d2[2]] if s then d2[2] = s end
+ end
+ end
+ end
+ end
+ elseif kind == "gpos_single" then
+ local c = step.coverage
+ if c then
+ if step.format == "kern" then
+ local tv = tables[c]
+ if tv then
+ step.coverage = tv
+ end
+ else
+ for g1, d1 in next, c do
+ local tv = tables[d1]
+ if tv then
+ c[g1] = tv
+ end
+ end
+ end
+ end
+ elseif kind == "gpos_cursive" then
+ local c = step.coverage
+ if c then
+ for g1, d1 in next, c do
+ local f = tables[d1[2]] if f then d1[2] = f end
+ local s = tables[d1[3]] if s then d1[3] = s end
+ end
+ end
+ elseif kind == "gpos_mark2base" or kind == "gpos_mark2mark" then
+ local c = step.baseclasses
+ if c then
+-- for k, v in next, c do
+-- local tv = tables[v]
+-- if tv then
+-- c[k] = tv
+-- end
+-- end
+ for g1, d1 in next, c do
+ for g2, d2 in next, d1 do
+ local tv = tables[d2]
+ if tv then
+ d1[g2] = tv
+ end
+ end
+ end
+ end
+ local c = step.coverage
+ if c then
+ for g1, d1 in next, c do
+ local tv = tables[d1[2]]
+ if tv then
+ d1[2] = tv
+ end
+ end
+ end
+ elseif kind == "gpos_mark2ligature" then
+ local c = step.baseclasses
+ if c then
+-- for k, v in next, c do
+-- local tv = tables[v]
+-- if tv then
+-- c[k] = tv
+-- end
+-- end
+ for g1, d1 in next, c do
+ for g2, d2 in next, d1 do
+ for g3, d3 in next, d2 do
+ local tv = tables[d2[g3]]
+ if tv then
+ d2[g3] = tv
+ end
+ end
+ end
+ end
+ end
+ local c = step.coverage
+ if c then
+ for g1, d1 in next, c do
+ local tv = tables[d1[2]]
+ if tv then
+ d1[2] = tv
+ end
+ end
+ end
+ end
+ local rules = step.rules
+ if rules then
+ for i=1,#rules do
+ local rule = rules[i]
+ local before = rule.before
+ if before then
+ local tv = tables[before]
+ if tv then
+ rule.before = tv
+ before = tv
+ end
+ for i=1,#before do
+ local tv = tables[before[i]]
+ if tv then
+ before[i] = tv
+ end
+ end
+ end
+ local after = rule.after
+ if after then
+ local tv = tables[after]
+ if tv then
+ rule.after = tv
+ after = tv
+ end
+ for i=1,#after do
+ local tv = tables[after[i]]
+ if tv then
+ after[i] = tv
+ end
+ end
+ end
+ local current = rule.current
+ if current then
+ local tv = tables[current]
+ if tv then
+ rule.current = tv
+ current = tv
+ end
+ for i=1,#current do
+ local tv = tables[current[i]]
+ if tv then
+ current[i] = tv
+ end
+ end
+ end
+ local replacements = rule.replacements
+ if replacements then
+                              local tv = tables[replacements]
+ if tv then
+ rule.replacements = tv
+ end
+ end
+ end
+ end
+ end
+ end
+ if features then
+ local tv = tables[features]
+ if tv then
+ sequence.features = tv
+ features = tv
+ end
+ for script, feature in next, features do
+ local tv = tables[feature]
+ if tv then
+ features[script] = tv
+ end
+ end
+ end
+ if order then
+ local tv = tables[order]
+ if tv then
+ sequence.order = tv
+ end
+ end
+ if flags then
+ local tv = tables[flags]
+ if tv then
+ sequence.flags = tv
+ end
+ end
+ end
+ end
+
+ if sequences then
+ unpackthem(sequences)
+ end
+
+ if sublookups then
+ unpackthem(sublookups)
+ end
+
+ if features then
+ for k, list in next, features do
+ for feature, spec in next, list do
+ local tv = tables[spec]
+ if tv then
+ list[feature] = tv
+ end
+ end
+ end
+ end
+
+ data.tables = nil
+ end
+ end
+end
+
+local mt = {
+ __index = function(t,k) -- maybe set it
+ if k == "height" then
+ local ht = t.boundingbox[4]
+ return ht < 0 and 0 or ht
+ elseif k == "depth" then
+ local dp = -t.boundingbox[2]
+ return dp < 0 and 0 or dp
+ elseif k == "width" then
+ return 0
+ elseif k == "name" then -- or maybe uni*
+ return forcenotdef and ".notdef"
+ end
+ end
+}
+
+local function sameformat(sequence,steps,first,nofsteps,kind)
+ return true
+end
+
+local function mergesteps_1(lookup,strict)
+ local steps = lookup.steps
+ local nofsteps = lookup.nofsteps
+ local first = steps[1]
+ if strict then
+ local f = first.format
+ for i=2,nofsteps do
+ if steps[i].format ~= f then
+ report("not merging %a steps of %a lookup %a, different formats",nofsteps,lookup.type,lookup.name)
+ return 0
+ end
+ end
+ end
+ report("merging %a steps of %a lookup %a",nofsteps,lookup.type,lookup.name)
+ local target = first.coverage
+ for i=2,nofsteps do
+ for k, v in next, steps[i].coverage do
+ if not target[k] then
+ target[k] = v
+ end
+ end
+ end
+ lookup.nofsteps = 1
+ lookup.merged = true
+ lookup.steps = { first }
+ return nofsteps - 1
+end
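+
+-- For example, with made-up coverage tables: merging { A = x } with
+-- { A = y, B = z } gives { A = x, B = z }; on a clash the entry from the
+-- earlier step is kept, which presumably mirrors the order in which the
+-- subtables would have been tried.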
+
+
+local function mergesteps_2(lookup,strict) -- pairs
+ local steps = lookup.steps
+ local nofsteps = lookup.nofsteps
+ local first = steps[1]
+ if strict then
+ local f = first.format
+ for i=2,nofsteps do
+ if steps[i].format ~= f then
+ report("not merging %a steps of %a lookup %a, different formats",nofsteps,lookup.type,lookup.name)
+ return 0
+ end
+ end
+ end
+ report("merging %a steps of %a lookup %a",nofsteps,lookup.type,lookup.name)
+ local target = first.coverage
+ for i=2,nofsteps do
+ for k, v in next, steps[i].coverage do
+ local tk = target[k]
+ if tk then
+ for k, v in next, v do
+ if not tk[k] then
+ tk[k] = v
+ end
+ end
+ else
+ target[k] = v
+ end
+ end
+ end
+ lookup.nofsteps = 1
+ lookup.steps = { first }
+ return nofsteps - 1
+end
+
+
+local function mergesteps_3(lookup,strict) -- marks
+ local steps = lookup.steps
+ local nofsteps = lookup.nofsteps
+ local first = steps[1]
+ report("merging %a steps of %a lookup %a",nofsteps,lookup.type,lookup.name)
+ local baseclasses = { }
+ local coverage = { }
+ local used = { }
+ for i=1,nofsteps do
+ local offset = i*10
+ local step = steps[i]
+ for k, v in sortedhash(step.baseclasses) do
+ baseclasses[offset+k] = v
+ end
+ for k, v in next, step.coverage do
+ local tk = coverage[k]
+ if tk then
+ for k, v in next, v do
+ if not tk[k] then
+ tk[k] = v
+ local c = offset + v[1]
+ v[1] = c
+ if not used[c] then
+ used[c] = true
+ end
+ end
+ end
+ else
+ coverage[k] = v
+ local c = offset + v[1]
+ v[1] = c
+ if not used[c] then
+ used[c] = true
+ end
+ end
+ end
+ end
+ for k, v in next, baseclasses do
+ if not used[k] then
+ baseclasses[k] = nil
+ report("discarding not used baseclass %i",k)
+ end
+ end
+ first.baseclasses = baseclasses
+ first.coverage = coverage
+ lookup.nofsteps = 1
+ lookup.steps = { first }
+ return nofsteps - 1
+end
+
+local function nested(old,new)
+ for k, v in next, old do
+ if k == "ligature" then
+ if not new.ligature then
+ new.ligature = v
+ end
+ else
+ local n = new[k]
+ if n then
+ nested(v,n)
+ else
+ new[k] = v
+ end
+ end
+ end
+end
+
+local function mergesteps_4(lookup) -- ligatures
+ local steps = lookup.steps
+ local nofsteps = lookup.nofsteps
+ local first = steps[1]
+ report("merging %a steps of %a lookup %a",nofsteps,lookup.type,lookup.name)
+ local target = first.coverage
+ for i=2,nofsteps do
+ for k, v in next, steps[i].coverage do
+ local tk = target[k]
+ if tk then
+ nested(v,tk)
+ else
+ target[k] = v
+ end
+ end
+ end
+ lookup.nofsteps = 1
+ lookup.steps = { first }
+ return nofsteps - 1
+end
+
+local function checkkerns(lookup)
+ local steps = lookup.steps
+ local nofsteps = lookup.nofsteps
+ for i=1,nofsteps do
+ local step = steps[i]
+ if step.format == "pair" then
+ local coverage = step.coverage
+ local kerns = true
+ for g1, d1 in next, coverage do
+ if d1[1] ~= 0 or d1[2] ~= 0 or d1[4] ~= 0 then
+ kerns = false
+ break
+ end
+ end
+ if kerns then
+ report("turning pairs of step %a of %a lookup %a into kerns",i,lookup.type,lookup.name)
+ for g1, d1 in next, coverage do
+ coverage[g1] = d1[3]
+ end
+ step.format = "kern"
+ end
+ end
+ end
+end
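+
+-- In other words, with a made-up entry: a pair value record like
+--
+--   coverage[0x0066] = { 0, 0, -80, 0 }   -- only the (third) advance is set
+--
+-- collapses to coverage[0x0066] = -80 and the step is flagged as "kern", so
+-- the positioning code can treat it as a plain kern.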
+
+local function checkpairs(lookup)
+ local steps = lookup.steps
+ local nofsteps = lookup.nofsteps
+ local kerned = 0
+ for i=1,nofsteps do
+ local step = steps[i]
+ if step.format == "pair" then
+ local coverage = step.coverage
+ local kerns = true
+ for g1, d1 in next, coverage do
+ for g2, d2 in next, d1 do
+ if d2[2] then
+ kerns = false
+ break
+ else
+ local v = d2[1]
+ if v[1] ~= 0 or v[2] ~= 0 or v[4] ~= 0 then
+ kerns = false
+ break
+ end
+ end
+ end
+ end
+ if kerns then
+ report("turning pairs of step %a of %a lookup %a into kerns",i,lookup.type,lookup.name)
+ for g1, d1 in next, coverage do
+ for g2, d2 in next, d1 do
+ d1[g2] = d2[1][3]
+ end
+ end
+ step.format = "kern"
+ kerned = kerned + 1
+ end
+ end
+ end
+ return kerned
+end
+
+function readers.compact(data)
+ if not data or data.compacted then
+ return
+ else
+ data.compacted = true
+ end
+ local resources = data.resources
+ local merged = 0
+ local kerned = 0
+ local allsteps = 0
+ local function compact(what)
+ local lookups = resources[what]
+ if lookups then
+ for i=1,#lookups do
+ local lookup = lookups[i]
+ local nofsteps = lookup.nofsteps
+ allsteps = allsteps + nofsteps
+ if nofsteps > 1 then
+ local kind = lookup.type
+ if kind == "gsub_single" or kind == "gsub_alternate" or kind == "gsub_multiple" then
+ merged = merged + mergesteps_1(lookup)
+ elseif kind == "gsub_ligature" then
+ merged = merged + mergesteps_4(lookup)
+ elseif kind == "gpos_single" then
+ merged = merged + mergesteps_1(lookup,true)
+ checkkerns(lookup)
+ elseif kind == "gpos_pair" then
+ merged = merged + mergesteps_2(lookup,true)
+ kerned = kerned + checkpairs(lookup)
+ elseif kind == "gpos_cursive" then
+ merged = merged + mergesteps_2(lookup)
+ elseif kind == "gpos_mark2mark" or kind == "gpos_mark2base" or kind == "gpos_mark2ligature" then
+ merged = merged + mergesteps_3(lookup)
+ end
+ end
+ end
+ else
+ report("no lookups in %a",what)
+ end
+ end
+ compact("sequences")
+ compact("sublookups")
+ if merged > 0 then
+ report("%i steps of %i removed due to merging",merged,allsteps)
+ end
+ if kerned > 0 then
+ report("%i steps of %i steps turned from pairs into kerns",kerned,allsteps)
+ end
+end
+
+function readers.expand(data)
+ if not data or data.expanded then
+ return
+ else
+ data.expanded = true
+ end
+ local resources = data.resources
+ local sublookups = resources.sublookups
+ local sequences = resources.sequences -- were one level up
+ local markclasses = resources.markclasses
+ local descriptions = data.descriptions
+ if descriptions then
+ local defaultwidth = resources.defaultwidth or 0
+ local defaultheight = resources.defaultheight or 0
+ local defaultdepth = resources.defaultdepth or 0
+ local basename = trace_markwidth and file.basename(resources.filename)
+ for u, d in next, descriptions do
+ local bb = d.boundingbox
+ local wd = d.width
+ if not wd then
+ -- or bb?
+ d.width = defaultwidth
+ elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
+ report("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ end
+ if bb then
+ local ht = bb[4]
+ local dp = -bb[2]
+ if ht == 0 or ht < 0 then
+ -- not set
+ else
+ d.height = ht
+ end
+ if dp == 0 or dp < 0 then
+ -- not set
+ else
+ d.depth = dp
+ end
+ end
+ end
+ end
+ if sequences then
+ -- we also need to do sublookups
+ for i=1,#sequences do
+ local sequence = sequences[i]
+ local steps = sequence.steps
+ if steps then
+ local kind = sequence.type
+ local markclass = sequence.markclass
+ if markclass then
+ if not markclasses then
+                        report("missing markclasses")
+ sequence.markclass = false
+ else
+ sequence.markclass = markclasses[markclass]
+ end
+ end
+ for i=1,sequence.nofsteps do
+ local step = steps[i]
+ local baseclasses = step.baseclasses
+ if baseclasses then
+ local coverage = step.coverage
+ for k, v in next, coverage do
+-- v[1] = baseclasses[v[2]] -- slot 1 is a placeholder
+ v[1] = baseclasses[v[1]]
+ end
+ elseif kind == "gpos_cursive" then
+ local coverage = step.coverage
+ for k, v in next, coverage do
+ v[1] = coverage -- slot 1 is a placeholder
+ end
+ end
+ local rules = step.rules
+ if rules then
+ local rulehash = { }
+ local rulesize = 0
+ local coverage = { }
+ local lookuptype = sequence.type
+ step.coverage = coverage -- combined hits
+ for nofrules=1,#rules do
+ local rule = rules[nofrules]
+ local current = rule.current
+ local before = rule.before
+ local after = rule.after
+ local replacements = rule.replacements
+ local sequence = { }
+ local nofsequences = 0
+ if before then
+ for n=1,#before do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = before[n]
+ end
+ end
+ local start = nofsequences + 1
+ for n=1,#current do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = current[n]
+ end
+ local stop = nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = after[n]
+ end
+ end
+ local lookups = rule.lookups
+ local subtype = nil
+ if lookups then
+ for k, v in next, lookups do
+ local lookup = sublookups[v]
+ if lookup then
+ lookups[k] = lookup
+ if not subtype then
+ subtype = lookup.type
+ end
+ else
+ -- already expanded
+ end
+ end
+ end
+ if sequence[1] then -- we merge coverage into one
+ rulesize = rulesize + 1
+ rulehash[rulesize] = {
+ nofrules, -- 1
+ lookuptype, -- 2
+ sequence, -- 3
+ start, -- 4
+ stop, -- 5
+ rule.lookups, -- 6
+ replacements, -- 7
+ subtype, -- 8
+ }
+ for unic in next, sequence[start] do
+ local cu = coverage[unic]
+ if not cu then
+                                 coverage[unic] = rulehash -- can now be done cleaner, I think
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua
index fa152466d..1e9ed9076 100644
--- a/tex/context/base/font-syn.lua
+++ b/tex/context/base/font-syn.lua
@@ -313,6 +313,7 @@ but to keep the overview, we define them here.</p>
filters.otf = fonts.handlers.otf.readers.getinfo
filters.ttf = filters.otf
filters.ttc = filters.otf
+-- filters.ttx = filters.otf
local function normalize(t)
local boundingbox = t.fontbbox
@@ -1001,6 +1002,7 @@ local function analyzefiles(olddata)
if trace_names then
report_names("identifying %s font %a",suffix,completename)
end
+ -- needs checking with ttc / ttx : date not updated ?
local result = nil
local modification = lfs.attributes(completename,"modification")
if olddata and modification and modification > 0 then
@@ -1013,8 +1015,10 @@ local function analyzefiles(olddata)
result = oldspecification
specifications[#specifications + 1] = result
else
+ -- ??
end
else
+ -- ??
end
elseif oldrejected[storedname] == modification then
result = false
@@ -1026,7 +1030,7 @@ local function analyzefiles(olddata)
if result then
if #result > 0 then
for r=1,#result do
- local ok = check_name(data,result[r],storedname,modification,suffix,r-1) -- subfonts start at zero
+ local ok = check_name(data,result[r],storedname,modification,suffix,r) -- subfonts start at zero
-- if not ok then
-- nofskipped = nofskipped + 1
-- end
diff --git a/tex/context/base/font-tmp.lua b/tex/context/base/font-tmp.lua
index fab693d44..8db5cff14 100644
--- a/tex/context/base/font-tmp.lua
+++ b/tex/context/base/font-tmp.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['font-tmp'] = {
license = "see context related readme files"
}
--- There is a complet efeature loader but it needs a bit of testing, first so this
+-- There is a complete feature loader but it needs a bit of testing, first so this
-- one does design size only (as needed for identifying).
local next, type = next, type
diff --git a/tex/context/base/meta-imp-outlines.mkiv b/tex/context/base/meta-imp-outlines.mkiv
index 0f3d1875d..e6257906b 100644
--- a/tex/context/base/meta-imp-outlines.mkiv
+++ b/tex/context/base/meta-imp-outlines.mkiv
@@ -158,7 +158,8 @@ end
\starttext
% \setupbodyfont[pagella]
-% \showshape[character=3,alternative=page]
+
+% \definedfont[latinmodern-math]\showshape[index=3078,alternative=page]
% \setupbodyfont[pagella]
% \showshape[character=all,alternative=page]
diff --git a/tex/context/base/mult-de.mkii b/tex/context/base/mult-de.mkii
index 90aae390e..213740179 100644
--- a/tex/context/base/mult-de.mkii
+++ b/tex/context/base/mult-de.mkii
@@ -78,8 +78,10 @@
\setinterfacevariable{appendices}{anhaenge}
\setinterfacevariable{appendix}{anhang}
\setinterfacevariable{april}{april}
+\setinterfacevariable{atleftmargin}{atleftmargin}
\setinterfacevariable{atmargin}{amrand}
\setinterfacevariable{atpage}{aufseite}
+\setinterfacevariable{atrightmargin}{atrightmargin}
\setinterfacevariable{attachment}{attachment}
\setinterfacevariable{august}{august}
\setinterfacevariable{author}{autor}
@@ -223,12 +225,23 @@
\setinterfacevariable{indices}{indizies}
\setinterfacevariable{informeel}{informeel}
\setinterfacevariable{inherit}{inherit}
+\setinterfacevariable{ininner}{ininner}
+\setinterfacevariable{ininneredge}{ininneredge}
+\setinterfacevariable{ininnermargin}{ininnermargin}
\setinterfacevariable{inleft}{imlinken}
+\setinterfacevariable{inleftedge}{imlinkenrand}
+\setinterfacevariable{inleftmargin}{inlinkermarginale}
\setinterfacevariable{inmargin}{imrand}
\setinterfacevariable{inner}{innen}
\setinterfacevariable{inneredge}{inneredge}
\setinterfacevariable{innermargin}{innermargin}
+\setinterfacevariable{inother}{inother}
+\setinterfacevariable{inouter}{inouter}
+\setinterfacevariable{inouteredge}{inouteredge}
+\setinterfacevariable{inoutermargin}{inoutermargin}
\setinterfacevariable{inright}{imrechten}
+\setinterfacevariable{inrightedge}{imrechtenrand}
+\setinterfacevariable{inrightmargin}{inrechtermarginale}
\setinterfacevariable{interaction}{interaktion}
\setinterfacevariable{interactionmenu}{interaktionsmenue}
\setinterfacevariable{intermezzi}{intermezzi}
@@ -280,6 +293,7 @@
\setinterfacevariable{march}{maerz}
\setinterfacevariable{margin}{marginalie}
\setinterfacevariable{marginedge}{marginalkante}
+\setinterfacevariable{margintext}{marginaltext}
\setinterfacevariable{margintitle}{marginaltitel}
\setinterfacevariable{marking}{beschriftung}
\setinterfacevariable{math}{math}
diff --git a/tex/context/base/mult-def.lua b/tex/context/base/mult-def.lua
index c0831de2d..60ffe9ebb 100644
--- a/tex/context/base/mult-def.lua
+++ b/tex/context/base/mult-def.lua
@@ -7,6 +7,8 @@ if not modules then modules = { } end modules ['mult-def'] = {
dataonly = true,
}
+-- we can remove some commands, like inleftmargin and so on (see typo-mar.mkiv)
+
return {
["commands"]={
["CAPPED"]={
@@ -13694,6 +13696,16 @@ return {
["pe"]="ارث‌بردن",
["ro"]="inherit",
},
+ ["ininner"]={
+ ["cs"]="ininner",
+ ["de"]="ininner",
+ ["en"]="ininner",
+ ["fr"]="ininner",
+ ["it"]="ininner",
+ ["nl"]="inbinnen",
+ ["pe"]="درداخلی",
+ ["ro"]="ininner",
+ },
["inleft"]={
["cs"]="vlevo",
["de"]="imlinken",
@@ -13704,6 +13716,26 @@ return {
["pe"]="درون‌چپ",
["ro"]="instanga",
},
+ ["inleftedge"]={
+ ["cs"]="nalevo",
+ ["de"]="imlinkenrand",
+ ["en"]="inleftedge",
+ ["fr"]="dansbordgauche",
+ ["it"]="inlatosinistro",
+ ["nl"]="inlinkerrand",
+ ["pe"]="درلبه‌چپ",
+ ["ro"]="inparteastanga",
+ },
+ ["inleftmargin"]={
+ ["cs"]="nalevyokraj",
+ ["de"]="inlinkermarginale",
+ ["en"]="inleftmargin",
+ ["fr"]="dansmargegauche",
+ ["it"]="inmarginesinistro",
+ ["nl"]="inlinkermarge",
+ ["pe"]="درحاشیه‌چپ",
+ ["ro"]="inmargineastanga",
+ },
["inmargin"]={
["cs"]="naokraji",
["de"]="imrand",
@@ -13714,6 +13746,38 @@ return {
["pe"]="درون‌حاشیه",
["ro"]="inmargine",
},
+ ["inoutermargin"]={
+ ["en"]="inoutermargin",
+ },
+ ["ininnermargin"]={
+ ["en"]="ininnermargin",
+ },
+ ["inouteredge"]={
+ ["en"]="inouteredge",
+ },
+ ["ininneredge"]={
+ ["en"]="ininneredge",
+ },
+ ["atleftmargin"]={
+ ["cs"]="atleftmargin",
+ ["de"]="atleftmargin",
+ ["en"]="atleftmargin",
+ ["fr"]="atleftmargin",
+ ["it"]="atleftmargin",
+ ["nl"]="oplinkermarge",
+ ["pe"]="درحاشیه‌چپ",
+ ["ro"]="atleftmargin",
+ },
+ ["atrightmargin"]={
+ ["cs"]="atrightmargin",
+ ["de"]="atrightmargin",
+ ["en"]="atrightmargin",
+ ["fr"]="atrightmargin",
+ ["it"]="atrightmargin",
+ ["nl"]="oprechtermarge",
+ ["pe"]="درحاشیه‌راست",
+ ["ro"]="atrightmargin",
+ },
["inner"]={
["cs"]="uvnitr",
["de"]="innen",
@@ -13744,6 +13808,26 @@ return {
["pe"]="حاشیه‌داخلی",
["ro"]="innermargin",
},
+ ["inother"]={
+ ["cs"]="inother",
+ ["de"]="inother",
+ ["en"]="inother",
+ ["fr"]="inother",
+ ["it"]="inother",
+ ["nl"]="inandere",
+ ["pe"]="inother",
+ ["ro"]="inother",
+ },
+ ["inouter"]={
+ ["cs"]="inouter",
+ ["de"]="inouter",
+ ["en"]="inouter",
+ ["fr"]="inouter",
+ ["it"]="inouter",
+ ["nl"]="inbuiten",
+ ["pe"]="درخارجی",
+ ["ro"]="inouter",
+ },
["inright"]={
["cs"]="vpravo",
["de"]="imrechten",
@@ -13754,6 +13838,26 @@ return {
["pe"]="درون‌راست",
["ro"]="indreapta",
},
+ ["inrightedge"]={
+ ["cs"]="napravo",
+ ["de"]="imrechtenrand",
+ ["en"]="inrightedge",
+ ["fr"]="dansborddroit",
+ ["it"]="inlatodestro",
+ ["nl"]="inrechterrand",
+ ["pe"]="درلبه‌راست",
+ ["ro"]="inparteadreapta",
+ },
+ ["inrightmargin"]={
+ ["cs"]="napravyokraj",
+ ["de"]="inrechtermarginale",
+ ["en"]="inrightmargin",
+ ["fr"]="dansmargedroite",
+ ["it"]="inmarginedestro",
+ ["nl"]="inrechtermarge",
+ ["pe"]="درحاشیه‌راست",
+ ["ro"]="inmargineadreapta",
+ },
["interaction"]={
["cs"]="interakce",
["de"]="interaktion",
@@ -14254,6 +14358,16 @@ return {
["pe"]="لبه‌حاشیه",
["ro"]="marginebordura",
},
+ ["margintext"]={
+ ["cs"]="marginalnitext",
+ ["de"]="marginaltext",
+ ["en"]="margintext",
+ ["fr"]="textemarge",
+ ["it"]="testoinmargine",
+ ["nl"]="margetekst",
+ ["pe"]="متن‌حاشیه",
+ ["ro"]="textmarginal",
+ },
["margintitle"]={
["cs"]="titulmarginalie",
["de"]="marginaltitel",
diff --git a/tex/context/base/mult-en.mkii b/tex/context/base/mult-en.mkii
index b08070ba0..cf1b68017 100644
--- a/tex/context/base/mult-en.mkii
+++ b/tex/context/base/mult-en.mkii
@@ -78,8 +78,10 @@
\setinterfacevariable{appendices}{appendices}
\setinterfacevariable{appendix}{appendix}
\setinterfacevariable{april}{april}
+\setinterfacevariable{atleftmargin}{atleftmargin}
\setinterfacevariable{atmargin}{atmargin}
\setinterfacevariable{atpage}{atpage}
+\setinterfacevariable{atrightmargin}{atrightmargin}
\setinterfacevariable{attachment}{attachment}
\setinterfacevariable{august}{august}
\setinterfacevariable{author}{author}
@@ -223,12 +225,23 @@
\setinterfacevariable{indices}{indices}
\setinterfacevariable{informeel}{informeel}
\setinterfacevariable{inherit}{inherit}
+\setinterfacevariable{ininner}{ininner}
+\setinterfacevariable{ininneredge}{ininneredge}
+\setinterfacevariable{ininnermargin}{ininnermargin}
\setinterfacevariable{inleft}{inleft}
+\setinterfacevariable{inleftedge}{inleftedge}
+\setinterfacevariable{inleftmargin}{inleftmargin}
\setinterfacevariable{inmargin}{inmargin}
\setinterfacevariable{inner}{inner}
\setinterfacevariable{inneredge}{inneredge}
\setinterfacevariable{innermargin}{innermargin}
+\setinterfacevariable{inother}{inother}
+\setinterfacevariable{inouter}{inouter}
+\setinterfacevariable{inouteredge}{inouteredge}
+\setinterfacevariable{inoutermargin}{inoutermargin}
\setinterfacevariable{inright}{inright}
+\setinterfacevariable{inrightedge}{inrightedge}
+\setinterfacevariable{inrightmargin}{inrightmargin}
\setinterfacevariable{interaction}{interaction}
\setinterfacevariable{interactionmenu}{interactionmenu}
\setinterfacevariable{intermezzi}{intermezzi}
@@ -280,6 +293,7 @@
\setinterfacevariable{march}{march}
\setinterfacevariable{margin}{margin}
\setinterfacevariable{marginedge}{marginedge}
+\setinterfacevariable{margintext}{margintext}
\setinterfacevariable{margintitle}{margintitle}
\setinterfacevariable{marking}{marking}
\setinterfacevariable{math}{math}
diff --git a/tex/context/base/mult-fr.mkii b/tex/context/base/mult-fr.mkii
index d76da18d9..1f0b8558e 100644
--- a/tex/context/base/mult-fr.mkii
+++ b/tex/context/base/mult-fr.mkii
@@ -78,8 +78,10 @@
\setinterfacevariable{appendices}{annexes}
\setinterfacevariable{appendix}{annexe}
\setinterfacevariable{april}{avril}
+\setinterfacevariable{atleftmargin}{atleftmargin}
\setinterfacevariable{atmargin}{alamarge}
\setinterfacevariable{atpage}{alapage}
+\setinterfacevariable{atrightmargin}{atrightmargin}
\setinterfacevariable{attachment}{attachment}
\setinterfacevariable{august}{août}
\setinterfacevariable{author}{auteur}
@@ -223,12 +225,23 @@
\setinterfacevariable{indices}{indices}
\setinterfacevariable{informeel}{informeel}
\setinterfacevariable{inherit}{herite}
+\setinterfacevariable{ininner}{ininner}
+\setinterfacevariable{ininneredge}{ininneredge}
+\setinterfacevariable{ininnermargin}{ininnermargin}
\setinterfacevariable{inleft}{dansgauche}
+\setinterfacevariable{inleftedge}{dansbordgauche}
+\setinterfacevariable{inleftmargin}{dansmargegauche}
\setinterfacevariable{inmargin}{dansmarge}
\setinterfacevariable{inner}{interieur}
\setinterfacevariable{inneredge}{bordinterieur}
\setinterfacevariable{innermargin}{margeinterieure}
+\setinterfacevariable{inother}{inother}
+\setinterfacevariable{inouter}{inouter}
+\setinterfacevariable{inouteredge}{inouteredge}
+\setinterfacevariable{inoutermargin}{inoutermargin}
\setinterfacevariable{inright}{dansdroit}
+\setinterfacevariable{inrightedge}{dansborddroit}
+\setinterfacevariable{inrightmargin}{dansmargedroite}
\setinterfacevariable{interaction}{interaction}
\setinterfacevariable{interactionmenu}{menuinteraction}
\setinterfacevariable{intermezzi}{intermezzi}
@@ -280,6 +293,7 @@
\setinterfacevariable{march}{mars}
\setinterfacevariable{margin}{marge}
\setinterfacevariable{marginedge}{bordmarge}
+\setinterfacevariable{margintext}{textemarge}
\setinterfacevariable{margintitle}{titremarge}
\setinterfacevariable{marking}{marquage}
\setinterfacevariable{math}{math}
diff --git a/tex/context/base/mult-it.mkii b/tex/context/base/mult-it.mkii
index 6474d93c4..60de2b489 100644
--- a/tex/context/base/mult-it.mkii
+++ b/tex/context/base/mult-it.mkii
@@ -78,8 +78,10 @@
\setinterfacevariable{appendices}{appendici}
\setinterfacevariable{appendix}{appendice}
\setinterfacevariable{april}{aprile}
+\setinterfacevariable{atleftmargin}{atleftmargin}
\setinterfacevariable{atmargin}{almargine}
\setinterfacevariable{atpage}{apagina}
+\setinterfacevariable{atrightmargin}{atrightmargin}
\setinterfacevariable{attachment}{attachment}
\setinterfacevariable{august}{agosto}
\setinterfacevariable{author}{autore}
@@ -223,12 +225,23 @@
\setinterfacevariable{indices}{indicianalitici}
\setinterfacevariable{informeel}{informeel}
\setinterfacevariable{inherit}{inherit}
+\setinterfacevariable{ininner}{ininner}
+\setinterfacevariable{ininneredge}{ininneredge}
+\setinterfacevariable{ininnermargin}{ininnermargin}
\setinterfacevariable{inleft}{insinistra}
+\setinterfacevariable{inleftedge}{inlatosinistro}
+\setinterfacevariable{inleftmargin}{inmarginesinistro}
\setinterfacevariable{inmargin}{inmargine}
\setinterfacevariable{inner}{interno}
\setinterfacevariable{inneredge}{bordointerno}
\setinterfacevariable{innermargin}{margineinterno}
+\setinterfacevariable{inother}{inother}
+\setinterfacevariable{inouter}{inouter}
+\setinterfacevariable{inouteredge}{inouteredge}
+\setinterfacevariable{inoutermargin}{inoutermargin}
\setinterfacevariable{inright}{indestra}
+\setinterfacevariable{inrightedge}{inlatodestro}
+\setinterfacevariable{inrightmargin}{inmarginedestro}
\setinterfacevariable{interaction}{interazione}
\setinterfacevariable{interactionmenu}{menuinterattivo}
\setinterfacevariable{intermezzi}{intermezzi}
@@ -280,6 +293,7 @@
\setinterfacevariable{march}{marzo}
\setinterfacevariable{margin}{margine}
\setinterfacevariable{marginedge}{bordomargine}
+\setinterfacevariable{margintext}{testoinmargine}
\setinterfacevariable{margintitle}{titoloinmargine}
\setinterfacevariable{marking}{marcatura}
\setinterfacevariable{math}{math}
diff --git a/tex/context/base/mult-nl.mkii b/tex/context/base/mult-nl.mkii
index 22350dc50..4769a0f28 100644
--- a/tex/context/base/mult-nl.mkii
+++ b/tex/context/base/mult-nl.mkii
@@ -78,8 +78,10 @@
\setinterfacevariable{appendices}{bijlagen}
\setinterfacevariable{appendix}{bijlage}
\setinterfacevariable{april}{april}
+\setinterfacevariable{atleftmargin}{oplinkermarge}
\setinterfacevariable{atmargin}{opmarge}
\setinterfacevariable{atpage}{oppagina}
+\setinterfacevariable{atrightmargin}{oprechtermarge}
\setinterfacevariable{attachment}{aanhangsel}
\setinterfacevariable{august}{augustus}
\setinterfacevariable{author}{auteur}
@@ -223,12 +225,23 @@
\setinterfacevariable{indices}{indices}
\setinterfacevariable{informeel}{informeel}
\setinterfacevariable{inherit}{erf}
+\setinterfacevariable{ininner}{inbinnen}
+\setinterfacevariable{ininneredge}{ininneredge}
+\setinterfacevariable{ininnermargin}{ininnermargin}
\setinterfacevariable{inleft}{inlinker}
+\setinterfacevariable{inleftedge}{inlinkerrand}
+\setinterfacevariable{inleftmargin}{inlinkermarge}
\setinterfacevariable{inmargin}{inmarge}
\setinterfacevariable{inner}{binnen}
\setinterfacevariable{inneredge}{binnenrand}
\setinterfacevariable{innermargin}{binnenmarge}
+\setinterfacevariable{inother}{inandere}
+\setinterfacevariable{inouter}{inbuiten}
+\setinterfacevariable{inouteredge}{inouteredge}
+\setinterfacevariable{inoutermargin}{inoutermargin}
\setinterfacevariable{inright}{inrechter}
+\setinterfacevariable{inrightedge}{inrechterrand}
+\setinterfacevariable{inrightmargin}{inrechtermarge}
\setinterfacevariable{interaction}{interactie}
\setinterfacevariable{interactionmenu}{interactiemenu}
\setinterfacevariable{intermezzi}{intermezzos}
@@ -280,6 +293,7 @@
\setinterfacevariable{march}{maart}
\setinterfacevariable{margin}{marge}
\setinterfacevariable{marginedge}{kantlijn}
+\setinterfacevariable{margintext}{margetekst}
\setinterfacevariable{margintitle}{margetitel}
\setinterfacevariable{marking}{markering}
\setinterfacevariable{math}{math}
diff --git a/tex/context/base/mult-pe.mkii b/tex/context/base/mult-pe.mkii
index 32cf32db1..79a696d45 100644
--- a/tex/context/base/mult-pe.mkii
+++ b/tex/context/base/mult-pe.mkii
@@ -78,8 +78,10 @@
\setinterfacevariable{appendices}{پیوستها}
\setinterfacevariable{appendix}{پیوست}
\setinterfacevariable{april}{آوریل}
+\setinterfacevariable{atleftmargin}{درحاشیه‌چپ}
\setinterfacevariable{atmargin}{درحاشیه}
\setinterfacevariable{atpage}{درصفحه}
+\setinterfacevariable{atrightmargin}{درحاشیه‌راست}
\setinterfacevariable{attachment}{attachment}
\setinterfacevariable{august}{آگوست}
\setinterfacevariable{author}{author}
@@ -223,12 +225,23 @@
\setinterfacevariable{indices}{نمایه‌ها}
\setinterfacevariable{informeel}{informeel}
\setinterfacevariable{inherit}{ارث‌بردن}
+\setinterfacevariable{ininner}{درداخلی}
+\setinterfacevariable{ininneredge}{ininneredge}
+\setinterfacevariable{ininnermargin}{ininnermargin}
\setinterfacevariable{inleft}{درون‌چپ}
+\setinterfacevariable{inleftedge}{درلبه‌چپ}
+\setinterfacevariable{inleftmargin}{درحاشیه‌چپ}
\setinterfacevariable{inmargin}{درون‌حاشیه}
\setinterfacevariable{inner}{داخلی}
\setinterfacevariable{inneredge}{لبه‌داخلی}
\setinterfacevariable{innermargin}{حاشیه‌داخلی}
+\setinterfacevariable{inother}{inother}
+\setinterfacevariable{inouter}{درخارجی}
+\setinterfacevariable{inouteredge}{inouteredge}
+\setinterfacevariable{inoutermargin}{inoutermargin}
\setinterfacevariable{inright}{درون‌راست}
+\setinterfacevariable{inrightedge}{درلبه‌راست}
+\setinterfacevariable{inrightmargin}{درحاشیه‌راست}
\setinterfacevariable{interaction}{پانل}
\setinterfacevariable{interactionmenu}{منوی‌پانل}
\setinterfacevariable{intermezzi}{میان‌پرده‌ها}
@@ -280,6 +293,7 @@
\setinterfacevariable{march}{مارس}
\setinterfacevariable{margin}{حاشیه}
\setinterfacevariable{marginedge}{لبه‌حاشیه}
+\setinterfacevariable{margintext}{متن‌حاشیه}
\setinterfacevariable{margintitle}{عنوان‌حاشیه}
\setinterfacevariable{marking}{نشانه‌گذاری}
\setinterfacevariable{math}{math}
diff --git a/tex/context/base/mult-ro.mkii b/tex/context/base/mult-ro.mkii
index a5d90033f..a9d9f88ff 100644
--- a/tex/context/base/mult-ro.mkii
+++ b/tex/context/base/mult-ro.mkii
@@ -78,8 +78,10 @@
\setinterfacevariable{appendices}{apendixuri}
\setinterfacevariable{appendix}{apendix}
\setinterfacevariable{april}{aprilie}
+\setinterfacevariable{atleftmargin}{atleftmargin}
\setinterfacevariable{atmargin}{lamargine}
\setinterfacevariable{atpage}{lapagina}
+\setinterfacevariable{atrightmargin}{atrightmargin}
\setinterfacevariable{attachment}{attachment}
\setinterfacevariable{august}{august}
\setinterfacevariable{author}{autor}
@@ -223,12 +225,23 @@
\setinterfacevariable{indices}{indexuri}
\setinterfacevariable{informeel}{informeel}
\setinterfacevariable{inherit}{inherit}
+\setinterfacevariable{ininner}{ininner}
+\setinterfacevariable{ininneredge}{ininneredge}
+\setinterfacevariable{ininnermargin}{ininnermargin}
\setinterfacevariable{inleft}{instanga}
+\setinterfacevariable{inleftedge}{inparteastanga}
+\setinterfacevariable{inleftmargin}{inmargineastanga}
\setinterfacevariable{inmargin}{inmargine}
\setinterfacevariable{inner}{intern}
\setinterfacevariable{inneredge}{inneredge}
\setinterfacevariable{innermargin}{innermargin}
+\setinterfacevariable{inother}{inother}
+\setinterfacevariable{inouter}{inouter}
+\setinterfacevariable{inouteredge}{inouteredge}
+\setinterfacevariable{inoutermargin}{inoutermargin}
\setinterfacevariable{inright}{indreapta}
+\setinterfacevariable{inrightedge}{inparteadreapta}
+\setinterfacevariable{inrightmargin}{inmargineadreapta}
\setinterfacevariable{interaction}{interactiune}
\setinterfacevariable{interactionmenu}{meniuinteractiune}
\setinterfacevariable{intermezzi}{intermezzi}
@@ -280,6 +293,7 @@
\setinterfacevariable{march}{martie}
\setinterfacevariable{margin}{margine}
\setinterfacevariable{marginedge}{marginebordura}
+\setinterfacevariable{margintext}{textmarginal}
\setinterfacevariable{margintitle}{titlumarginal}
\setinterfacevariable{marking}{marcaje}
\setinterfacevariable{math}{math}
diff --git a/tex/context/base/node-nut.lua b/tex/context/base/node-nut.lua
index fb30ff0dc..554d74ec5 100644
--- a/tex/context/base/node-nut.lua
+++ b/tex/context/base/node-nut.lua
@@ -94,14 +94,14 @@ local direct = node.direct
local fastcopy = table.fastcopy
-if type(direct) ~= "table" then
- return
-elseif gonuts then
- statistics.register("running in nuts mode", function() return "yes" end)
-else
- statistics.register("running in nuts mode", function() return "no" end)
- return
-end
+-- if type(direct) ~= "table" then
+-- return
+-- elseif gonuts then
+-- statistics.register("running in nuts mode", function() return "yes" end)
+-- else
+-- statistics.register("running in nuts mode", function() return "no" end)
+-- return
+-- end
local texget = tex.get
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 2c3b549c4..86821739d 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 0fee76e32..b22b8d62b 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/strc-bkm.mkiv b/tex/context/base/strc-bkm.mkiv
index 9688a1f93..5a8dba562 100644
--- a/tex/context/base/strc-bkm.mkiv
+++ b/tex/context/base/strc-bkm.mkiv
@@ -15,6 +15,8 @@
\registerctxluafile{strc-bkm}{1.001}
+% \enabledirectives[references.bookmarks.preroll]
+
\unprotect
%D Bookmarks are a very viewer dependent feature. They are mostly used
diff --git a/tex/context/base/typo-mar.mkiv b/tex/context/base/typo-mar.mkiv
index d5869b459..4ca935ce2 100644
--- a/tex/context/base/typo-mar.mkiv
+++ b/tex/context/base/typo-mar.mkiv
@@ -339,44 +339,44 @@
% lines
-\definemargindata [inleftmargin] [\v!left ] [\c!margin=\v!margin,\c!width=\leftmarginwidth ,\c!style=,\c!color=]
-\definemargindata [inrightmargin] [\v!right] [\c!margin=\v!margin,\c!width=\rightmarginwidth,\c!style=,\c!color=]
-\definemargindata [inoutermargin] [\v!outer] [\c!margin=\v!margin,\c!width=\outermarginwidth,\c!style=,\c!color=]
-\definemargindata [ininnermargin] [\v!inner] [\c!margin=\v!margin,\c!width=\innermarginwidth,\c!style=,\c!color=]
+\definemargindata [\v!inleftmargin] [\v!left ] [\c!margin=\v!margin,\c!width=\leftmarginwidth ,\c!style=,\c!color=]
+\definemargindata [\v!inrightmargin] [\v!right] [\c!margin=\v!margin,\c!width=\rightmarginwidth,\c!style=,\c!color=]
+\definemargindata [\v!inoutermargin] [\v!outer] [\c!margin=\v!margin,\c!width=\outermarginwidth,\c!style=,\c!color=]
+\definemargindata [\v!ininnermargin] [\v!inner] [\c!margin=\v!margin,\c!width=\innermarginwidth,\c!style=,\c!color=]
-\definemargindata [inleftedge] [\v!left ] [\c!margin=\v!edge ,\c!width=\leftedgewidth ,\c!style=,\c!color=,\c!category=\v!edge]
-\definemargindata [inrightedge] [\v!right] [\c!margin=\v!edge ,\c!width=\rightedgewidth ,\c!style=,\c!color=,\c!category=\v!edge]
-\definemargindata [inouteredge] [\v!outer] [\c!margin=\v!edge ,\c!width=\outeredgewidth ,\c!style=,\c!color=,\c!category=\v!edge]
-\definemargindata [ininneredge] [\v!inner] [\c!margin=\v!edge ,\c!width=\inneredgewidth ,\c!style=,\c!color=,\c!category=\v!edge]
+\definemargindata [\v!inleftedge] [\v!left ] [\c!margin=\v!edge ,\c!width=\leftedgewidth ,\c!style=,\c!color=,\c!category=\v!edge]
+\definemargindata [\v!inrightedge] [\v!right] [\c!margin=\v!edge ,\c!width=\rightedgewidth ,\c!style=,\c!color=,\c!category=\v!edge]
+\definemargindata [\v!inouteredge] [\v!outer] [\c!margin=\v!edge ,\c!width=\outeredgewidth ,\c!style=,\c!color=,\c!category=\v!edge]
+\definemargindata [\v!ininneredge] [\v!inner] [\c!margin=\v!edge ,\c!width=\inneredgewidth ,\c!style=,\c!color=,\c!category=\v!edge]
-\definemargindata [atleftmargin] [\v!left ] [\c!margin=\v!normal,\c!width=\leftmarginwidth ,\c!style=,\c!color=]
-\definemargindata [atrightmargin] [\v!right] [\c!margin=\v!normal,\c!width=\rightmarginwidth,\c!style=,\c!color=]
+\definemargindata [\v!atleftmargin] [\v!left ] [\c!margin=\v!normal,\c!width=\leftmarginwidth ,\c!style=,\c!color=]
+\definemargindata [\v!atrightmargin] [\v!right] [\c!margin=\v!normal,\c!width=\rightmarginwidth,\c!style=,\c!color=]
-% text
+% text: \v!added
-\definemargindata [inleft] [\v!left ] [\c!margin=\v!margin,\c!width=\leftmarginwidth ,\c!align=\v!flushright]
-\definemargindata [inright] [\v!right] [\c!margin=\v!margin,\c!width=\rightmarginwidth,\c!align=\v!flushleft]
-\definemargindata [inouter] [\v!outer] [\c!margin=\v!margin,\c!width=\outermarginwidth,\c!align=\v!inner]
-\definemargindata [ininner] [\v!inner] [\c!margin=\v!margin,\c!width=\innermarginwidth,\c!align=\v!outer]
+\definemargindata [\v!inleft] [\v!left ] [\c!margin=\v!margin,\c!width=\leftmarginwidth ,\c!align=\v!flushright]
+\definemargindata [\v!inright] [\v!right] [\c!margin=\v!margin,\c!width=\rightmarginwidth,\c!align=\v!flushleft]
+\definemargindata [\v!inouter] [\v!outer] [\c!margin=\v!margin,\c!width=\outermarginwidth,\c!align=\v!inner]
+\definemargindata [\v!ininner] [\v!inner] [\c!margin=\v!margin,\c!width=\innermarginwidth,\c!align=\v!outer]
% no longer auto auto-other
-\definemargindata [inmargin] [\v!left] [\c!margin=\v!margin,\c!width=\leftmarginwidth, \c!align=\v!flushright]
-\definemargindata [inother] [\v!right] [\c!margin=\v!margin,\c!width=\rightmarginwidth,\c!align=\v!flushleft]
+\definemargindata [\v!inmargin] [\v!left] [\c!margin=\v!margin,\c!width=\leftmarginwidth, \c!align=\v!flushright]
+\definemargindata [\v!inother] [\v!right] [\c!margin=\v!margin,\c!width=\rightmarginwidth,\c!align=\v!flushleft]
-\definemargindata [margintext] [\v!left] [\c!margin=\v!margin,\c!width=\leftmarginwidth, \c!align=\v!flushright,\c!stack=\v!yes]
+\definemargindata [\v!margintext] [\v!left] [\c!margin=\v!margin,\c!width=\leftmarginwidth, \c!align=\v!flushright,\c!stack=\v!yes]
\setupmarginframed [\v!left ] [\c!method=\v!first,\c!align=\v!flushright,\s!parent=\??marginframed] % we could autoparent when no define yet
\setupmarginframed [\v!right] [\c!method=\v!first,\c!align=\v!flushleft, \s!parent=\??marginframed]
\setupmarginframed [\v!outer] [\c!method=\v!first,\c!align=\v!inner, \s!parent=\??marginframed]
\setupmarginframed [\v!inner] [\c!method=\v!first,\c!align=\v!outer, \s!parent=\??marginframed]
-\definemarginframed [inleft] [\v!left ]
-\definemarginframed [inright] [\v!right]
-\definemarginframed [inouter] [\v!outer]
-\definemarginframed [ininner] [\v!inner]
-\definemarginframed [inmargin] [\v!inleft]
-\definemarginframed [inother] [\v!inright]
+\definemarginframed [\v!inleft] [\v!left ]
+\definemarginframed [\v!inright] [\v!right]
+\definemarginframed [\v!inouter] [\v!outer]
+\definemarginframed [\v!ininner] [\v!inner]
+\definemarginframed [\v!inmargin] [\v!inleft]
+\definemarginframed [\v!inother] [\v!inright]
\let\marginword \margintext
\let\margintitle \margintext
diff --git a/tex/context/base/util-sto.lua b/tex/context/base/util-sto.lua
index 8aafca425..d21267d7a 100644
--- a/tex/context/base/util-sto.lua
+++ b/tex/context/base/util-sto.lua
@@ -113,22 +113,16 @@ local f_index = {
["number"] = f_number,
}
-local t_index = {
- ["empty"] = { __index = f_empty },
- ["self"] = { __index = f_self },
- ["table"] = { __index = f_table },
- ["number"] = { __index = f_number },
-}
-
function table.setmetatableindex(t,f)
if type(t) ~= "table" then
f, t = t, { }
end
local m = getmetatable(t)
+ local i = f_index[f] or f
if m then
- m.__index = f_index[f] or f
+ m.__index = i
else
- setmetatable(t,t_index[f] or { __index = f })
+ setmetatable(t,{ __index = i })
end
return t
end
@@ -137,19 +131,16 @@ local f_index = {
["ignore"] = f_ignore,
}
-local t_index = {
- ["ignore"] = { __newindex = f_ignore },
-}
-
function table.setmetatablenewindex(t,f)
if type(t) ~= "table" then
f, t = t, { }
end
local m = getmetatable(t)
+ local i = f_index[f] or f
if m then
- m.__newindex = f_index[f] or f
+ m.__newindex = i
else
- setmetatable(t,t_index[f] or { __newindex = f })
+ setmetatable(t,{ __newindex = i })
end
return t
end
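
The util-sto.lua hunk above drops the cached t_index/t_newindex metatables and instead resolves the handler once (f_index[f] or f) before installing it as __index or __newindex. Below is a minimal sketch of the resulting behaviour in plain Lua; the named handlers and the demo cache table are hypothetical stand-ins, only the resolution logic mirrors the patched function.

    local function f_table(t, k)
        local v = { }
        t[k] = v                 -- autovivify: create and store a subtable
        return v
    end

    local function f_number(t, k)
        t[k] = 0                 -- default numeric value on first access
        return 0
    end

    local f_index = {
        ["table"]  = f_table,
        ["number"] = f_number,
    }

    local function setmetatableindex(t, f)
        if type(t) ~= "table" then
            f, t = t, { }                    -- allow setmetatableindex("table")
        end
        local m = getmetatable(t)
        local i = f_index[f] or f            -- resolve a named handler once
        if m then
            m.__index = i                    -- reuse an existing metatable
        else
            setmetatable(t, { __index = i }) -- build the metatable inline
        end
        return t
    end

    -- usage: a cache whose subtables appear on first access
    local cache = setmetatableindex({ }, "table")
    cache.fonts["lm"] = true
    print(cache.fonts["lm"])                 --> true

Building the metatable inline trades a tiny allocation per call for not having to keep the handler tables and the prebuilt metatable tables in sync, which is what the patch removes.
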
diff --git a/tex/context/base/x-set-11.mkiv b/tex/context/base/x-set-11.mkiv
index 73e68e073..5031eff2d 100644
--- a/tex/context/base/x-set-11.mkiv
+++ b/tex/context/base/x-set-11.mkiv
@@ -783,12 +783,12 @@
% A prelude to a rewrite and some more:
-\definetype[parametercommand][type]
-\definetype[parameterkey] [type]
-\definetype[parametervalue] [type][space=on]
+\definetype[parametercommand][\v!type]
+\definetype[parameterkey] [\v!type]
+\definetype[parametervalue] [\v!type][\c!space=\v!on]
-\setuptype[parametercommand] [color=darkmagenta]
-\setuptype[parametervalue] [color=darkyellow]
+\setuptype [parametercommand] [\c!color=darkmagenta]
+\setuptype [parametervalue] [\c!color=darkyellow]
\startxmlsetups xml:setups:parameters:value
\edef\currentsetupparameterkey {\xmlatt{#1}{name}}
diff --git a/tex/context/interface/keys-cs.xml b/tex/context/interface/keys-cs.xml
index c8e7e7517..5acf71e88 100644
--- a/tex/context/interface/keys-cs.xml
+++ b/tex/context/interface/keys-cs.xml
@@ -81,8 +81,10 @@
<cd:variable name='appendices' value='dodatky'/>
<cd:variable name='appendix' value='dodatek'/>
<cd:variable name='april' value='duben'/>
+ <cd:variable name='atleftmargin' value='atleftmargin'/>
<cd:variable name='atmargin' value='naokraji'/>
<cd:variable name='atpage' value='nastrance'/>
+ <cd:variable name='atrightmargin' value='atrightmargin'/>
<cd:variable name='attachment' value='attachment'/>
<cd:variable name='august' value='srpen'/>
<cd:variable name='author' value='autor'/>
@@ -226,12 +228,23 @@
<cd:variable name='indices' value='rejstriky'/>
<cd:variable name='informeel' value='informeel'/>
<cd:variable name='inherit' value='inherit'/>
+ <cd:variable name='ininner' value='ininner'/>
+ <cd:variable name='ininneredge' value='ininneredge'/>
+ <cd:variable name='ininnermargin' value='ininnermargin'/>
<cd:variable name='inleft' value='vlevo'/>
+ <cd:variable name='inleftedge' value='nalevo'/>
+ <cd:variable name='inleftmargin' value='nalevyokraj'/>
<cd:variable name='inmargin' value='naokraji'/>
<cd:variable name='inner' value='uvnitr'/>
<cd:variable name='inneredge' value='inneredge'/>
<cd:variable name='innermargin' value='innermargin'/>
+ <cd:variable name='inother' value='inother'/>
+ <cd:variable name='inouter' value='inouter'/>
+ <cd:variable name='inouteredge' value='inouteredge'/>
+ <cd:variable name='inoutermargin' value='inoutermargin'/>
<cd:variable name='inright' value='vpravo'/>
+ <cd:variable name='inrightedge' value='napravo'/>
+ <cd:variable name='inrightmargin' value='napravyokraj'/>
<cd:variable name='interaction' value='interakce'/>
<cd:variable name='interactionmenu' value='interaktivnimenu'/>
<cd:variable name='intermezzi' value='intermezzi'/>
@@ -283,6 +296,7 @@
<cd:variable name='march' value='brezen'/>
<cd:variable name='margin' value='marginalie'/>
<cd:variable name='marginedge' value='textovahrana'/>
+ <cd:variable name='margintext' value='marginalnitext'/>
<cd:variable name='margintitle' value='titulmarginalie'/>
<cd:variable name='marking' value='znaceni'/>
<cd:variable name='math' value='math'/>
@@ -545,7 +559,7 @@
<cd:variable name='xml' value='xml'/>
<cd:variable name='year' value='rok'/>
<cd:variable name='yes' value='ano'/>
- </cd:variable>
+ </cd:variables>
<!-- definitions for interface constants for language cs -->
@@ -1133,7 +1147,7 @@
<cd:constant name='yoffset' value='yoffset'/>
<cd:constant name='yscale' value='ymeritko'/>
<cd:constant name='ystep' value='ykrok'/>
- </cd:constant>
+ </cd:constants>
<!-- definitions for interface elements for language cs -->
@@ -1164,7 +1178,7 @@
<cd:element name='stop' value='stop'/>
<cd:element name='text' value='text'/>
<cd:element name='type' value='opis'/>
- </cd:element>
+ </cd:elements>
<!-- definitions for interface commands for language cs -->
@@ -1820,6 +1834,6 @@
<cd:command name='writetolist' value='zapisdoseznamu'/>
<cd:command name='writetoreferencelist' value='zapisdoseznamuodkazu'/>
<cd:command name='writetoregister' value='zapisdorejstriku'/>
- </cd:command>
+ </cd:commands>
</cd:interface>
\ No newline at end of file
diff --git a/tex/context/interface/keys-de.xml b/tex/context/interface/keys-de.xml
index a100a938f..6c3bd5363 100644
--- a/tex/context/interface/keys-de.xml
+++ b/tex/context/interface/keys-de.xml
@@ -81,8 +81,10 @@
<cd:variable name='appendices' value='anhaenge'/>
<cd:variable name='appendix' value='anhang'/>
<cd:variable name='april' value='april'/>
+ <cd:variable name='atleftmargin' value='atleftmargin'/>
<cd:variable name='atmargin' value='amrand'/>
<cd:variable name='atpage' value='aufseite'/>
+ <cd:variable name='atrightmargin' value='atrightmargin'/>
<cd:variable name='attachment' value='attachment'/>
<cd:variable name='august' value='august'/>
<cd:variable name='author' value='autor'/>
@@ -226,12 +228,23 @@
<cd:variable name='indices' value='indizies'/>
<cd:variable name='informeel' value='informeel'/>
<cd:variable name='inherit' value='inherit'/>
+ <cd:variable name='ininner' value='ininner'/>
+ <cd:variable name='ininneredge' value='ininneredge'/>
+ <cd:variable name='ininnermargin' value='ininnermargin'/>
<cd:variable name='inleft' value='imlinken'/>
+ <cd:variable name='inleftedge' value='imlinkenrand'/>
+ <cd:variable name='inleftmargin' value='inlinkermarginale'/>
<cd:variable name='inmargin' value='imrand'/>
<cd:variable name='inner' value='innen'/>
<cd:variable name='inneredge' value='inneredge'/>
<cd:variable name='innermargin' value='innermargin'/>
+ <cd:variable name='inother' value='inother'/>
+ <cd:variable name='inouter' value='inouter'/>
+ <cd:variable name='inouteredge' value='inouteredge'/>
+ <cd:variable name='inoutermargin' value='inoutermargin'/>
<cd:variable name='inright' value='imrechten'/>
+ <cd:variable name='inrightedge' value='imrechtenrand'/>
+ <cd:variable name='inrightmargin' value='inrechtermarginale'/>
<cd:variable name='interaction' value='interaktion'/>
<cd:variable name='interactionmenu' value='interaktionsmenue'/>
<cd:variable name='intermezzi' value='intermezzi'/>
@@ -283,6 +296,7 @@
<cd:variable name='march' value='maerz'/>
<cd:variable name='margin' value='marginalie'/>
<cd:variable name='marginedge' value='marginalkante'/>
+ <cd:variable name='margintext' value='marginaltext'/>
<cd:variable name='margintitle' value='marginaltitel'/>
<cd:variable name='marking' value='beschriftung'/>
<cd:variable name='math' value='math'/>
@@ -545,7 +559,7 @@
<cd:variable name='xml' value='xml'/>
<cd:variable name='year' value='jahr'/>
<cd:variable name='yes' value='ja'/>
- </cd:variable>
+ </cd:variables>
<!-- definitions for interface constants for language de -->
@@ -1133,7 +1147,7 @@
<cd:constant name='yoffset' value='yoffset'/>
<cd:constant name='yscale' value='yformat'/>
<cd:constant name='ystep' value='yschritt'/>
- </cd:constant>
+ </cd:constants>
<!-- definitions for interface elements for language de -->
@@ -1164,7 +1178,7 @@
<cd:element name='stop' value='stop'/>
<cd:element name='text' value='text'/>
<cd:element name='type' value='type'/>
- </cd:element>
+ </cd:elements>
<!-- definitions for interface commands for language de -->
@@ -1820,6 +1834,6 @@
<cd:command name='writetolist' value='schreibezurliste'/>
<cd:command name='writetoreferencelist' value='schreibezurreferenzliste'/>
<cd:command name='writetoregister' value='schreibezumregister'/>
- </cd:command>
+ </cd:commands>
</cd:interface>
\ No newline at end of file
diff --git a/tex/context/interface/keys-en.xml b/tex/context/interface/keys-en.xml
index 0e51dfc7c..fe3d583b8 100644
--- a/tex/context/interface/keys-en.xml
+++ b/tex/context/interface/keys-en.xml
@@ -81,8 +81,10 @@
<cd:variable name='appendices' value='appendices'/>
<cd:variable name='appendix' value='appendix'/>
<cd:variable name='april' value='april'/>
+ <cd:variable name='atleftmargin' value='atleftmargin'/>
<cd:variable name='atmargin' value='atmargin'/>
<cd:variable name='atpage' value='atpage'/>
+ <cd:variable name='atrightmargin' value='atrightmargin'/>
<cd:variable name='attachment' value='attachment'/>
<cd:variable name='august' value='august'/>
<cd:variable name='author' value='author'/>
@@ -226,12 +228,23 @@
<cd:variable name='indices' value='indices'/>
<cd:variable name='informeel' value='informeel'/>
<cd:variable name='inherit' value='inherit'/>
+ <cd:variable name='ininner' value='ininner'/>
+ <cd:variable name='ininneredge' value='ininneredge'/>
+ <cd:variable name='ininnermargin' value='ininnermargin'/>
<cd:variable name='inleft' value='inleft'/>
+ <cd:variable name='inleftedge' value='inleftedge'/>
+ <cd:variable name='inleftmargin' value='inleftmargin'/>
<cd:variable name='inmargin' value='inmargin'/>
<cd:variable name='inner' value='inner'/>
<cd:variable name='inneredge' value='inneredge'/>
<cd:variable name='innermargin' value='innermargin'/>
+ <cd:variable name='inother' value='inother'/>
+ <cd:variable name='inouter' value='inouter'/>
+ <cd:variable name='inouteredge' value='inouteredge'/>
+ <cd:variable name='inoutermargin' value='inoutermargin'/>
<cd:variable name='inright' value='inright'/>
+ <cd:variable name='inrightedge' value='inrightedge'/>
+ <cd:variable name='inrightmargin' value='inrightmargin'/>
<cd:variable name='interaction' value='interaction'/>
<cd:variable name='interactionmenu' value='interactionmenu'/>
<cd:variable name='intermezzi' value='intermezzi'/>
@@ -283,6 +296,7 @@
<cd:variable name='march' value='march'/>
<cd:variable name='margin' value='margin'/>
<cd:variable name='marginedge' value='marginedge'/>
+ <cd:variable name='margintext' value='margintext'/>
<cd:variable name='margintitle' value='margintitle'/>
<cd:variable name='marking' value='marking'/>
<cd:variable name='math' value='math'/>
@@ -545,7 +559,7 @@
<cd:variable name='xml' value='xml'/>
<cd:variable name='year' value='year'/>
<cd:variable name='yes' value='yes'/>
- </cd:variable>
+ </cd:variables>
<!-- definitions for interface constants for language en -->
@@ -1133,7 +1147,7 @@
<cd:constant name='yoffset' value='yoffset'/>
<cd:constant name='yscale' value='yscale'/>
<cd:constant name='ystep' value='ystep'/>
- </cd:constant>
+ </cd:constants>
<!-- definitions for interface elements for language en -->
@@ -1164,7 +1178,7 @@
<cd:element name='stop' value='stop'/>
<cd:element name='text' value='text'/>
<cd:element name='type' value='type'/>
- </cd:element>
+ </cd:elements>
<!-- definitions for interface commands for language en -->
@@ -1820,6 +1834,6 @@
<cd:command name='writetolist' value='writetolist'/>
<cd:command name='writetoreferencelist' value='writetoreferencelist'/>
<cd:command name='writetoregister' value='writetoregister'/>
- </cd:command>
+ </cd:commands>
</cd:interface>
\ No newline at end of file
diff --git a/tex/context/interface/keys-fr.xml b/tex/context/interface/keys-fr.xml
index cd35ad7e1..dbcb8ba47 100644
--- a/tex/context/interface/keys-fr.xml
+++ b/tex/context/interface/keys-fr.xml
@@ -81,8 +81,10 @@
<cd:variable name='appendices' value='annexes'/>
<cd:variable name='appendix' value='annexe'/>
<cd:variable name='april' value='avril'/>
+ <cd:variable name='atleftmargin' value='atleftmargin'/>
<cd:variable name='atmargin' value='alamarge'/>
<cd:variable name='atpage' value='alapage'/>
+ <cd:variable name='atrightmargin' value='atrightmargin'/>
<cd:variable name='attachment' value='attachment'/>
<cd:variable name='august' value='août'/>
<cd:variable name='author' value='auteur'/>
@@ -226,12 +228,23 @@
<cd:variable name='indices' value='indices'/>
<cd:variable name='informeel' value='informeel'/>
<cd:variable name='inherit' value='herite'/>
+ <cd:variable name='ininner' value='ininner'/>
+ <cd:variable name='ininneredge' value='ininneredge'/>
+ <cd:variable name='ininnermargin' value='ininnermargin'/>
<cd:variable name='inleft' value='dansgauche'/>
+ <cd:variable name='inleftedge' value='dansbordgauche'/>
+ <cd:variable name='inleftmargin' value='dansmargegauche'/>
<cd:variable name='inmargin' value='dansmarge'/>
<cd:variable name='inner' value='interieur'/>
<cd:variable name='inneredge' value='bordinterieur'/>
<cd:variable name='innermargin' value='margeinterieure'/>
+ <cd:variable name='inother' value='inother'/>
+ <cd:variable name='inouter' value='inouter'/>
+ <cd:variable name='inouteredge' value='inouteredge'/>
+ <cd:variable name='inoutermargin' value='inoutermargin'/>
<cd:variable name='inright' value='dansdroit'/>
+ <cd:variable name='inrightedge' value='dansborddroit'/>
+ <cd:variable name='inrightmargin' value='dansmargedroite'/>
<cd:variable name='interaction' value='interaction'/>
<cd:variable name='interactionmenu' value='menuinteraction'/>
<cd:variable name='intermezzi' value='intermezzi'/>
@@ -283,6 +296,7 @@
<cd:variable name='march' value='mars'/>
<cd:variable name='margin' value='marge'/>
<cd:variable name='marginedge' value='bordmarge'/>
+ <cd:variable name='margintext' value='textemarge'/>
<cd:variable name='margintitle' value='titremarge'/>
<cd:variable name='marking' value='marquage'/>
<cd:variable name='math' value='math'/>
@@ -545,7 +559,7 @@
<cd:variable name='xml' value='xml'/>
<cd:variable name='year' value='annee'/>
<cd:variable name='yes' value='oui'/>
- </cd:variable>
+ </cd:variables>
<!-- definitions for interface constants for language fr -->
@@ -1133,7 +1147,7 @@
<cd:constant name='yoffset' value='yoffset'/>
<cd:constant name='yscale' value='yscale'/>
<cd:constant name='ystep' value='ystep'/>
- </cd:constant>
+ </cd:constants>
<!-- definitions for interface elements for language fr -->
@@ -1164,7 +1178,7 @@
<cd:element name='stop' value='stoppe'/>
<cd:element name='text' value='texte'/>
<cd:element name='type' value='type'/>
- </cd:element>
+ </cd:elements>
<!-- definitions for interface commands for language fr -->
@@ -1820,6 +1834,6 @@
<cd:command name='writetolist' value='ecritdansliste'/>
<cd:command name='writetoreferencelist' value='ecritdanslistereference'/>
<cd:command name='writetoregister' value='ecritregistre'/>
- </cd:command>
+ </cd:commands>
</cd:interface>
\ No newline at end of file
diff --git a/tex/context/interface/keys-it.xml b/tex/context/interface/keys-it.xml
index f07dbb5e6..bb719007b 100644
--- a/tex/context/interface/keys-it.xml
+++ b/tex/context/interface/keys-it.xml
@@ -81,8 +81,10 @@
<cd:variable name='appendices' value='appendici'/>
<cd:variable name='appendix' value='appendice'/>
<cd:variable name='april' value='aprile'/>
+ <cd:variable name='atleftmargin' value='atleftmargin'/>
<cd:variable name='atmargin' value='almargine'/>
<cd:variable name='atpage' value='apagina'/>
+ <cd:variable name='atrightmargin' value='atrightmargin'/>
<cd:variable name='attachment' value='attachment'/>
<cd:variable name='august' value='agosto'/>
<cd:variable name='author' value='autore'/>
@@ -226,12 +228,23 @@
<cd:variable name='indices' value='indicianalitici'/>
<cd:variable name='informeel' value='informeel'/>
<cd:variable name='inherit' value='inherit'/>
+ <cd:variable name='ininner' value='ininner'/>
+ <cd:variable name='ininneredge' value='ininneredge'/>
+ <cd:variable name='ininnermargin' value='ininnermargin'/>
<cd:variable name='inleft' value='insinistra'/>
+ <cd:variable name='inleftedge' value='inlatosinistro'/>
+ <cd:variable name='inleftmargin' value='inmarginesinistro'/>
<cd:variable name='inmargin' value='inmargine'/>
<cd:variable name='inner' value='interno'/>
<cd:variable name='inneredge' value='bordointerno'/>
<cd:variable name='innermargin' value='margineinterno'/>
+ <cd:variable name='inother' value='inother'/>
+ <cd:variable name='inouter' value='inouter'/>
+ <cd:variable name='inouteredge' value='inouteredge'/>
+ <cd:variable name='inoutermargin' value='inoutermargin'/>
<cd:variable name='inright' value='indestra'/>
+ <cd:variable name='inrightedge' value='inlatodestro'/>
+ <cd:variable name='inrightmargin' value='inmarginedestro'/>
<cd:variable name='interaction' value='interazione'/>
<cd:variable name='interactionmenu' value='menuinterattivo'/>
<cd:variable name='intermezzi' value='intermezzi'/>
@@ -283,6 +296,7 @@
<cd:variable name='march' value='marzo'/>
<cd:variable name='margin' value='margine'/>
<cd:variable name='marginedge' value='bordomargine'/>
+ <cd:variable name='margintext' value='testoinmargine'/>
<cd:variable name='margintitle' value='titoloinmargine'/>
<cd:variable name='marking' value='marcatura'/>
<cd:variable name='math' value='math'/>
@@ -545,7 +559,7 @@
<cd:variable name='xml' value='xml'/>
<cd:variable name='year' value='anno'/>
<cd:variable name='yes' value='si'/>
- </cd:variable>
+ </cd:variables>
<!-- definitions for interface constants for language it -->
@@ -1133,7 +1147,7 @@
<cd:constant name='yoffset' value='yoffset'/>
<cd:constant name='yscale' value='yscale'/>
<cd:constant name='ystep' value='ystep'/>
- </cd:constant>
+ </cd:constants>
<!-- definitions for interface elements for language it -->
@@ -1164,7 +1178,7 @@
<cd:element name='stop' value='termina'/>
<cd:element name='text' value='testo'/>
<cd:element name='type' value='type'/>
- </cd:element>
+ </cd:elements>
<!-- definitions for interface commands for language it -->
@@ -1820,6 +1834,6 @@
<cd:command name='writetolist' value='scriviinelenco'/>
<cd:command name='writetoreferencelist' value='scriviinlistariferimenti'/>
<cd:command name='writetoregister' value='scriviinregistro'/>
- </cd:command>
+ </cd:commands>
</cd:interface>
\ No newline at end of file
diff --git a/tex/context/interface/keys-nl.xml b/tex/context/interface/keys-nl.xml
index f32d79275..9a9a1db39 100644
--- a/tex/context/interface/keys-nl.xml
+++ b/tex/context/interface/keys-nl.xml
@@ -81,8 +81,10 @@
<cd:variable name='appendices' value='bijlagen'/>
<cd:variable name='appendix' value='bijlage'/>
<cd:variable name='april' value='april'/>
+ <cd:variable name='atleftmargin' value='oplinkermarge'/>
<cd:variable name='atmargin' value='opmarge'/>
<cd:variable name='atpage' value='oppagina'/>
+ <cd:variable name='atrightmargin' value='oprechtermarge'/>
<cd:variable name='attachment' value='aanhangsel'/>
<cd:variable name='august' value='augustus'/>
<cd:variable name='author' value='auteur'/>
@@ -226,12 +228,23 @@
<cd:variable name='indices' value='indices'/>
<cd:variable name='informeel' value='informeel'/>
<cd:variable name='inherit' value='erf'/>
+ <cd:variable name='ininner' value='inbinnen'/>
+ <cd:variable name='ininneredge' value='ininneredge'/>
+ <cd:variable name='ininnermargin' value='ininnermargin'/>
<cd:variable name='inleft' value='inlinker'/>
+ <cd:variable name='inleftedge' value='inlinkerrand'/>
+ <cd:variable name='inleftmargin' value='inlinkermarge'/>
<cd:variable name='inmargin' value='inmarge'/>
<cd:variable name='inner' value='binnen'/>
<cd:variable name='inneredge' value='binnenrand'/>
<cd:variable name='innermargin' value='binnenmarge'/>
+ <cd:variable name='inother' value='inandere'/>
+ <cd:variable name='inouter' value='inbuiten'/>
+ <cd:variable name='inouteredge' value='inouteredge'/>
+ <cd:variable name='inoutermargin' value='inoutermargin'/>
<cd:variable name='inright' value='inrechter'/>
+ <cd:variable name='inrightedge' value='inrechterrand'/>
+ <cd:variable name='inrightmargin' value='inrechtermarge'/>
<cd:variable name='interaction' value='interactie'/>
<cd:variable name='interactionmenu' value='interactiemenu'/>
<cd:variable name='intermezzi' value='intermezzos'/>
@@ -283,6 +296,7 @@
<cd:variable name='march' value='maart'/>
<cd:variable name='margin' value='marge'/>
<cd:variable name='marginedge' value='kantlijn'/>
+ <cd:variable name='margintext' value='margetekst'/>
<cd:variable name='margintitle' value='margetitel'/>
<cd:variable name='marking' value='markering'/>
<cd:variable name='math' value='math'/>
@@ -545,7 +559,7 @@
<cd:variable name='xml' value='xml'/>
<cd:variable name='year' value='jaar'/>
<cd:variable name='yes' value='ja'/>
- </cd:variable>
+ </cd:variables>
<!-- definitions for interface constants for language nl -->
@@ -1133,7 +1147,7 @@
<cd:constant name='yoffset' value='yoffset'/>
<cd:constant name='yscale' value='yschaal'/>
<cd:constant name='ystep' value='ystap'/>
- </cd:constant>
+ </cd:constants>
<!-- definitions for interface elements for language nl -->
@@ -1164,7 +1178,7 @@
<cd:element name='stop' value='stop'/>
<cd:element name='text' value='tekst'/>
<cd:element name='type' value='type'/>
- </cd:element>
+ </cd:elements>
<!-- definitions for interface commands for language nl -->
@@ -1820,6 +1834,6 @@
<cd:command name='writetolist' value='schrijfnaarlijst'/>
<cd:command name='writetoreferencelist' value='schrijfnaarreferentielijst'/>
<cd:command name='writetoregister' value='schrijfnaarregister'/>
- </cd:command>
+ </cd:commands>
</cd:interface>
\ No newline at end of file
diff --git a/tex/context/interface/keys-pe.xml b/tex/context/interface/keys-pe.xml
index a55ad78ce..0891b475b 100644
--- a/tex/context/interface/keys-pe.xml
+++ b/tex/context/interface/keys-pe.xml
@@ -81,8 +81,10 @@
<cd:variable name='appendices' value='پیوستها'/>
<cd:variable name='appendix' value='پیوست'/>
<cd:variable name='april' value='آوریل'/>
+ <cd:variable name='atleftmargin' value='درحاشیه‌چپ'/>
<cd:variable name='atmargin' value='درحاشیه'/>
<cd:variable name='atpage' value='درصفحه'/>
+ <cd:variable name='atrightmargin' value='درحاشیه‌راست'/>
<cd:variable name='attachment' value='attachment'/>
<cd:variable name='august' value='آگوست'/>
<cd:variable name='author' value='author'/>
@@ -226,12 +228,23 @@
<cd:variable name='indices' value='نمایه‌ها'/>
<cd:variable name='informeel' value='informeel'/>
<cd:variable name='inherit' value='ارث‌بردن'/>
+ <cd:variable name='ininner' value='درداخلی'/>
+ <cd:variable name='ininneredge' value='ininneredge'/>
+ <cd:variable name='ininnermargin' value='ininnermargin'/>
<cd:variable name='inleft' value='درون‌چپ'/>
+ <cd:variable name='inleftedge' value='درلبه‌چپ'/>
+ <cd:variable name='inleftmargin' value='درحاشیه‌چپ'/>
<cd:variable name='inmargin' value='درون‌حاشیه'/>
<cd:variable name='inner' value='داخلی'/>
<cd:variable name='inneredge' value='لبه‌داخلی'/>
<cd:variable name='innermargin' value='حاشیه‌داخلی'/>
+ <cd:variable name='inother' value='inother'/>
+ <cd:variable name='inouter' value='درخارجی'/>
+ <cd:variable name='inouteredge' value='inouteredge'/>
+ <cd:variable name='inoutermargin' value='inoutermargin'/>
<cd:variable name='inright' value='درون‌راست'/>
+ <cd:variable name='inrightedge' value='درلبه‌راست'/>
+ <cd:variable name='inrightmargin' value='درحاشیه‌راست'/>
<cd:variable name='interaction' value='پانل'/>
<cd:variable name='interactionmenu' value='منوی‌پانل'/>
<cd:variable name='intermezzi' value='میان‌پرده‌ها'/>
@@ -283,6 +296,7 @@
<cd:variable name='march' value='مارس'/>
<cd:variable name='margin' value='حاشیه'/>
<cd:variable name='marginedge' value='لبه‌حاشیه'/>
+ <cd:variable name='margintext' value='متن‌حاشیه'/>
<cd:variable name='margintitle' value='عنوان‌حاشیه'/>
<cd:variable name='marking' value='نشانه‌گذاری'/>
<cd:variable name='math' value='math'/>
@@ -545,7 +559,7 @@
<cd:variable name='xml' value='xml'/>
<cd:variable name='year' value='سال'/>
<cd:variable name='yes' value='بله'/>
- </cd:variable>
+ </cd:variables>
<!-- definitions for interface constants for language pe -->
@@ -1133,7 +1147,7 @@
<cd:constant name='yoffset' value='آفست‌وای'/>
<cd:constant name='yscale' value='مقیاس‌وای'/>
<cd:constant name='ystep' value='گام‌وای'/>
- </cd:constant>
+ </cd:constants>
<!-- definitions for interface elements for language pe -->
@@ -1164,7 +1178,7 @@
<cd:element name='stop' value='پایان'/>
<cd:element name='text' value='متن'/>
<cd:element name='type' value='تایپ'/>
- </cd:element>
+ </cd:elements>
<!-- definitions for interface commands for language pe -->
@@ -1820,6 +1834,6 @@
<cd:command name='writetolist' value='بنویس‌در‌لیست'/>
<cd:command name='writetoreferencelist' value='بنویس‌درلیست‌مرجع'/>
<cd:command name='writetoregister' value='بنویس‌درثبت'/>
- </cd:command>
+ </cd:commands>
</cd:interface>
\ No newline at end of file
diff --git a/tex/context/interface/keys-ro.xml b/tex/context/interface/keys-ro.xml
index 951a5e8c9..a4c6d8b1c 100644
--- a/tex/context/interface/keys-ro.xml
+++ b/tex/context/interface/keys-ro.xml
@@ -81,8 +81,10 @@
<cd:variable name='appendices' value='apendixuri'/>
<cd:variable name='appendix' value='apendix'/>
<cd:variable name='april' value='aprilie'/>
+ <cd:variable name='atleftmargin' value='atleftmargin'/>
<cd:variable name='atmargin' value='lamargine'/>
<cd:variable name='atpage' value='lapagina'/>
+ <cd:variable name='atrightmargin' value='atrightmargin'/>
<cd:variable name='attachment' value='attachment'/>
<cd:variable name='august' value='august'/>
<cd:variable name='author' value='autor'/>
@@ -226,12 +228,23 @@
<cd:variable name='indices' value='indexuri'/>
<cd:variable name='informeel' value='informeel'/>
<cd:variable name='inherit' value='inherit'/>
+ <cd:variable name='ininner' value='ininner'/>
+ <cd:variable name='ininneredge' value='ininneredge'/>
+ <cd:variable name='ininnermargin' value='ininnermargin'/>
<cd:variable name='inleft' value='instanga'/>
+ <cd:variable name='inleftedge' value='inparteastanga'/>
+ <cd:variable name='inleftmargin' value='inmargineastanga'/>
<cd:variable name='inmargin' value='inmargine'/>
<cd:variable name='inner' value='intern'/>
<cd:variable name='inneredge' value='inneredge'/>
<cd:variable name='innermargin' value='innermargin'/>
+ <cd:variable name='inother' value='inother'/>
+ <cd:variable name='inouter' value='inouter'/>
+ <cd:variable name='inouteredge' value='inouteredge'/>
+ <cd:variable name='inoutermargin' value='inoutermargin'/>
<cd:variable name='inright' value='indreapta'/>
+ <cd:variable name='inrightedge' value='inparteadreapta'/>
+ <cd:variable name='inrightmargin' value='inmargineadreapta'/>
<cd:variable name='interaction' value='interactiune'/>
<cd:variable name='interactionmenu' value='meniuinteractiune'/>
<cd:variable name='intermezzi' value='intermezzi'/>
@@ -283,6 +296,7 @@
<cd:variable name='march' value='martie'/>
<cd:variable name='margin' value='margine'/>
<cd:variable name='marginedge' value='marginebordura'/>
+ <cd:variable name='margintext' value='textmarginal'/>
<cd:variable name='margintitle' value='titlumarginal'/>
<cd:variable name='marking' value='marcaje'/>
<cd:variable name='math' value='math'/>
@@ -545,7 +559,7 @@
<cd:variable name='xml' value='xml'/>
<cd:variable name='year' value='an'/>
<cd:variable name='yes' value='da'/>
- </cd:variable>
+ </cd:variables>
<!-- definitions for interface constants for language ro -->
@@ -1133,7 +1147,7 @@
<cd:constant name='yoffset' value='yoffset'/>
<cd:constant name='yscale' value='yscala'/>
<cd:constant name='ystep' value='ystep'/>
- </cd:constant>
+ </cd:constants>
<!-- definitions for interface elements for language ro -->
@@ -1164,7 +1178,7 @@
<cd:element name='stop' value='stop'/>
<cd:element name='text' value='text'/>
<cd:element name='type' value='type'/>
- </cd:element>
+ </cd:elements>
<!-- definitions for interface commands for language ro -->
@@ -1820,6 +1834,6 @@
<cd:command name='writetolist' value='scrieinlista'/>
<cd:command name='writetoreferencelist' value='scrieinlistareferinte'/>
<cd:command name='writetoregister' value='scrieinregistru'/>
- </cd:command>
+ </cd:commands>
</cd:interface>
\ No newline at end of file
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index c0ad6bda9..7ead58247 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 07/07/15 21:43:15
+-- merge date : 07/09/15 15:23:36
do -- begin closure to overcome local limits and interference
@@ -7080,7 +7080,7 @@ if not modules then modules={} end modules ['font-otf']={
license="see context related readme files"
}
local utfbyte=utf.byte
-local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
local type,next,tonumber,tostring=type,next,tonumber,tostring
local abs=math.abs
local reversed,concat,insert,remove,sortedkeys=table.reversed,table.concat,table.insert,table.remove,table.sortedkeys
@@ -7110,7 +7110,7 @@ local report_otf=logs.reporter("fonts","otf loading")
local fonts=fonts
local otf=fonts.handlers.otf
otf.glists={ "gsub","gpos" }
-otf.version=2.816
+otf.version=2.817
otf.cache=containers.define("fonts","otf",otf.version,true)
local hashes=fonts.hashes
local definers=fonts.definers
@@ -7291,7 +7291,6 @@ local ordered_enhancers={
"check encoding",
"add duplicates",
"expand lookups",
- "check extra features",
"cleanup tables",
"compact lookups",
"purge names",
@@ -7562,6 +7561,7 @@ function otf.load(filename,sub,featurefile)
applyruntimefixes(filename,data)
end
enhance("add dimensions",data,filename,nil,false)
+enhance("check extra features",data,filename)
if trace_sequences then
showfeatureorder(data,filename)
end
@@ -7722,7 +7722,7 @@ actions["prepare glyphs"]=function(data,filename,raw)
end
if not unicode or unicode==-1 then
if not name then
- name=format("u%06X.ctx",private)
+ name=formatters["u%06X.ctx"](private)
end
unicode=private
unicodes[name]=private
@@ -7733,7 +7733,7 @@ actions["prepare glyphs"]=function(data,filename,raw)
nofnames=nofnames+1
else
if not name then
- name=format("u%06X.ctx",unicode)
+ name=formatters["u%06X.ctx"](unicode)
end
unicodes[name]=unicode
nofunicodes=nofunicodes+1
@@ -7951,7 +7951,7 @@ actions["add duplicates"]=function(data,filename,raw)
end
if u>0 then
local duplicate=table.copy(description)
- duplicate.comment=format("copy of U+%05X",unicode)
+ duplicate.comment=formatters["copy of %U"](unicode)
descriptions[u]=duplicate
if trace_loading then
report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
@@ -9402,6 +9402,32 @@ function otf.scriptandlanguage(tfmdata,attr)
local properties=tfmdata.properties
return properties.script or "dflt",properties.language or "dflt"
end
+local function justset(coverage,unicode,replacement)
+ coverage[unicode]=replacement
+end
+otf.coverup={
+ stepkey="subtables",
+ actions={
+ substitution=justset,
+ alternate=justset,
+ multiple=justset,
+ ligature=justset,
+ },
+ register=function(coverage,descriptions,resources,feature,lookuptype,n)
+ local name=formatters["ctx_%s_%s"](feature,n)
+ resources.lookuptypes[name]=lookuptype
+ for u,c in next,coverage do
+ local description=descriptions[u]
+ local slookups=description.slookups
+ if slookups then
+ slookups[name]=c
+ else
+ description.slookups={ [name]=c }
+ end
+ end
+ return name
+ end
+}
end -- closure
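
The otf.coverup table added at the end of the merged file routes simple substitution, alternate, multiple and ligature steps through justset and then registers the collected coverage as one synthetic lookup per feature, presumably so runtime-defined extra features can inject simple replacements without building full subtable structures. Below is a minimal sketch of that register step against mocked-up glyph data; the descriptions, resources and coverage tables are hypothetical, and formatter() is a plain string.format stand-in for ConTeXt's formatters.

    local function formatter(fmt)
        return function(...) return string.format(fmt, ...) end
    end

    local function register(coverage, descriptions, resources, feature, lookuptype, n)
        local name = formatter("ctx_%s_%s")(feature, n)  -- e.g. "ctx_smcp_1"
        resources.lookuptypes[name] = lookuptype         -- remember the lookup type
        for u, c in next, coverage do                    -- attach the replacement to each glyph
            local description = descriptions[u]
            local slookups = description.slookups
            if slookups then
                slookups[name] = c
            else
                description.slookups = { [name] = c }
            end
        end
        return name
    end

    -- hypothetical data: map U+0041 to a private small caps slot
    local descriptions = { [0x41] = { } }
    local resources    = { lookuptypes = { } }
    local coverage     = { [0x41] = 0xE041 }

    local name = register(coverage, descriptions, resources, "smcp", "substitution", 1)
    print(name, descriptions[0x41].slookups[name])       --> ctx_smcp_1   57409
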