author    Hans Hagen <pragma@wxs.nl>    2017-05-27 20:39:37 +0200
committer Context Git Mirror Bot <phg42.2a@gmail.com>    2017-05-27 20:39:37 +0200
commit    0354ea8393c57ad00606d233468c39928e9d4b4a (patch)
tree      c087ba71c2e6458671101d3e485edb287dccc153 /tex
parent    82aed3e7e8af29f359ebef4f93684d20e98107e6 (diff)
download  context-0354ea8393c57ad00606d233468c39928e9d4b4a.tar.gz
2017-05-27 18:57:00
Diffstat (limited to 'tex')
-rw-r--r--  tex/context/base/mkii/cont-new.mkii                 |    2
-rw-r--r--  tex/context/base/mkii/context.mkii                  |    2
-rw-r--r--  tex/context/base/mkiv/char-obs.lua                  |  269
-rw-r--r--  tex/context/base/mkiv/cont-new.mkiv                 |    2
-rw-r--r--  tex/context/base/mkiv/context.mkiv                  |    2
-rw-r--r--  tex/context/base/mkiv/font-afk.lua                  |    2
-rw-r--r--  tex/context/base/mkiv/font-afm.lua                  | 1232
-rw-r--r--  tex/context/base/mkiv/font-cft.lua                  |    6
-rw-r--r--  tex/context/base/mkiv/font-cid.lua                  |    2
-rw-r--r--  tex/context/base/mkiv/font-col.lua                  |   10
-rw-r--r--  tex/context/base/mkiv/font-con.lua                  |    4
-rw-r--r--  tex/context/base/mkiv/font-ctx.lua                  |   37
-rw-r--r--  tex/context/base/mkiv/font-inj.lua                  | 1194
-rw-r--r--  tex/context/base/mkiv/font-ldr.lua                  |   70
-rw-r--r--  tex/context/base/mkiv/font-lib.mkvi                 |    3
-rw-r--r--  tex/context/base/mkiv/font-mat.mkvi                 |  131
-rw-r--r--  tex/context/base/mkiv/font-ocl.lua                  |    2
-rw-r--r--  tex/context/base/mkiv/font-odk.lua                  |  904
-rw-r--r--  tex/context/base/mkiv/font-odv.lua                  | 2380
-rw-r--r--  tex/context/base/mkiv/font-osd.lua                  |   22
-rw-r--r--  tex/context/base/mkiv/font-ota.lua                  |    2
-rw-r--r--  tex/context/base/mkiv/font-otb.lua                  |  705
-rw-r--r--  tex/context/base/mkiv/font-otc.lua                  |    2
-rw-r--r--  tex/context/base/mkiv/font-otf.lua                  | 2968
-rw-r--r--  tex/context/base/mkiv/font-otl.lua                  |    2
-rw-r--r--  tex/context/base/mkiv/font-otn.lua                  | 3927
-rw-r--r--  tex/context/base/mkiv/font-oto.lua                  |    4
-rw-r--r--  tex/context/base/mkiv/font-otp.lua                  |  894
-rw-r--r--  tex/context/base/mkiv/font-ott.lua                  |    2
-rw-r--r--  tex/context/base/mkiv/m-oldotf.mkiv                 |   77
-rw-r--r--  tex/context/base/mkiv/math-fbk.lua                  |  210
-rw-r--r--  tex/context/base/mkiv/metatex.lus                   |    9
-rw-r--r--  tex/context/base/mkiv/metatex.tex                   |  143
-rw-r--r--  tex/context/base/mkiv/node-nut.lua                  |    2
-rw-r--r--  tex/context/base/mkiv/status-files.pdf              |  bin 25616 -> 25828 bytes
-rw-r--r--  tex/context/base/mkiv/status-lua.pdf                |  bin 424658 -> 424659 bytes
-rw-r--r--  tex/context/base/mkiv/status-mkiv.lua               | 7441
-rw-r--r--  tex/context/base/mkiv/status-mkiv.tex               |  328
-rw-r--r--  tex/context/interface/mkiv/i-context.pdf            |  bin 846956 -> 846813 bytes
-rw-r--r--  tex/context/interface/mkiv/i-readme.pdf             |  bin 60771 -> 60772 bytes
-rw-r--r--  tex/context/sample/common/jojomayer.tex             |    3
-rw-r--r--  tex/context/sample/common/mcnish.tex                |   73
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua  |   17
-rw-r--r--  tex/generic/context/luatex/luatex-fonts.lua         |   18
44 files changed, 311 insertions, 22792 deletions
diff --git a/tex/context/base/mkii/cont-new.mkii b/tex/context/base/mkii/cont-new.mkii
index 148c8d7c3..954f50c13 100644
--- a/tex/context/base/mkii/cont-new.mkii
+++ b/tex/context/base/mkii/cont-new.mkii
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2017.05.25 12:50}
+\newcontextversion{2017.05.27 18:52}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/mkii/context.mkii b/tex/context/base/mkii/context.mkii
index 8c265aa46..e332fe5d7 100644
--- a/tex/context/base/mkii/context.mkii
+++ b/tex/context/base/mkii/context.mkii
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2017.05.25 12:50}
+\edef\contextversion{2017.05.27 18:52}
%D For those who want to use this:
diff --git a/tex/context/base/mkiv/char-obs.lua b/tex/context/base/mkiv/char-obs.lua
deleted file mode 100644
index 0f0e43d3c..000000000
--- a/tex/context/base/mkiv/char-obs.lua
+++ /dev/null
@@ -1,269 +0,0 @@
-------------------------
------ char-ini.lua -----
-------------------------
-
--- local template_a = "\\startextendcatcodetable{%s}\\chardef\\l=11\\chardef\\a=13\\let\\c\\catcode%s\\let\\a\\undefined\\let\\l\\undefined\\let\\c\\undefined\\stopextendcatcodetable"
--- local template_b = "\\chardef\\l=11\\chardef\\a=13\\let\\c\\catcode%s\\let\\a\\undefined\\let\\l\\undefined\\let\\c\\undefined"
---
--- function characters.define(tobelettered, tobeactivated) -- catcodetables
--- local lettered, activated, l, a = { }, { }, 0, 0
--- for u, chr in next, data do
--- -- we can use a macro instead of direct settings
--- local fallback = chr.fallback
--- if fallback then
--- -- texprint(format("{\\catcode %s=13\\unexpanded\\gdef %s{\\checkedchar{%s}{%s}}}",u,utfchar(u),u,fallback))
--- texsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\checkedchar{",u,"}{",fallback,"}}}") -- no texprint
--- a = a + 1
--- activated[a] = "\\c"..u.."\\a"
--- else
--- local contextname = chr.contextname
--- local category = chr.category
--- if contextname then
--- if is_character[category] then
--- -- by this time, we're still in normal catcode mode
--- -- subtle: not "\\",contextname but "\\"..contextname
--- if chr.unicodeslot < 128 then
--- texprint(ctxcatcodes,format("\\chardef\\%s=%s",contextname,u))
--- else
--- texprint(ctxcatcodes,format("\\let\\%s=%s",contextname,utfchar(u)))
--- if is_letter[category] then
--- l = l + 1
--- lettered[l] = "\\c"..u.."\\l"
--- end
--- end
--- elseif is_command[category] then
--- -- this might change: contextcommand ipv contextname
--- -- texprint(format("{\\catcode %s=13\\unexpanded\\gdef %s{\\%s}}",u,utfchar(u),contextname))
--- texsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}") -- no texprint
--- a = a + 1
--- activated[a] = "\\c"..u.."\\a"
--- end
--- elseif is_letter[category] then
--- if u >= 128 and u <= 65536 then -- catch private mess
--- l = l + 1
--- lettered[l] = "\\c"..u.."\\l"
--- end
--- end
--- end
--- local range = chr.range
--- if range then
--- l = l + 1
--- lettered[l] = format('\\dofastrecurse{"%05X}{"%05X}{1}{\\c\\fastrecursecounter\\l}',range.first,range.last)
--- end
--- end
--- -- if false then
--- l = l + 1
--- lettered[l] = "\\c"..0x200C.."\\l" -- non-joiner
--- l = l + 1
--- lettered[l] = "\\c"..0x200D.."\\l" -- joiner
--- -- fi
--- if tobelettered then
--- lettered = concat(lettered)
--- if true then
--- texsprint(ctxcatcodes,format(template_b,lettered)) -- global
--- else
--- for l=1,#tobelettered do
--- texsprint(ctxcatcodes,format(template_a,tobelettered[l],lettered))
--- end
--- end
--- end
--- if tobeactivated then
--- activated = concat(activated)
--- for a=1,#tobeactivated do
--- texsprint(ctxcatcodes,format(template_a,tobeactivated[a],activated))
--- end
--- end
--- end
---
--- function characters.setcodes()
--- for code, chr in next, data do
--- local cc = chr.category
--- if cc == 'll' or cc == 'lu' or cc == 'lt' then
--- local lc, uc = chr.lccode, chr.uccode
--- if not lc then chr.lccode, lc = code, code end
--- if not uc then chr.uccode, uc = code, code end
--- texsprint(ctxcatcodes,format("\\setcclcuc{%i}{%i}{%i}",code,lc,uc))
--- end
--- if cc == "lu" then
--- texprint(ctxcatcodes,"\\sfcode ",code,"999 ")
--- end
--- if cc == "lo" then
--- local range = chr.range
--- if range then
--- texsprint(ctxcatcodes,format('\\dofastrecurse{"%05X}{"%05X}{1}{\\setcclcucself\\fastrecursecounter}',range.first,range.last))
--- end
--- end
--- end
--- end
-
--- --[[ldx--
--- <p>The next variant has lazy token collecting, on a 140 page mk.tex this saves
--- about .25 seconds, which is understandable because we have no graphemes and
--- not collecting tokens is not only faster but also saves garbage collecting.
--- </p>
--- --ldx]]--
---
--- function utffilters.collapse(str,filename) -- we can make high a seperate pass (never needed with collapse)
--- if skippable[filesuffix(filename)] then
--- return str
--- -- elseif find(filename,"^virtual://") then
--- -- return str
--- -- else
--- -- -- print("\n"..filename)
--- end
--- if str and str ~= "" then
--- local nstr = #str
--- if nstr > 1 then
--- if initialize then -- saves a call
--- initialize()
--- end
--- local tokens, t, first, done, n = { }, 0, false, false, 0
--- for second in utfcharacters(str) do
--- if done then
--- if first then
--- if second == " " then
--- t = t + 1
--- tokens[t] = first
--- first = second
--- else
--- -- local crs = high[second]
--- -- if crs then
--- -- t = t + 1
--- -- tokens[t] = first
--- -- first = crs
--- -- else
--- local cgf = graphemes[first]
--- if cgf and cgf[second] then
--- first = cgf[second]
--- else
--- t = t + 1
--- tokens[t] = first
--- first = second
--- end
--- -- end
--- end
--- elseif second == " " then
--- first = second
--- else
--- -- local crs = high[second]
--- -- if crs then
--- -- first = crs
--- -- else
--- first = second
--- -- end
--- end
--- elseif second == " " then
--- first = nil
--- n = n + 1
--- else
--- -- local crs = high[second]
--- -- if crs then
--- -- for s in utfcharacters(str) do
--- -- if n == 1 then
--- -- break
--- -- else
--- -- t = t + 1
--- -- tokens[t] = s
--- -- n = n - 1
--- -- end
--- -- end
--- -- if first then
--- -- t = t + 1
--- -- tokens[t] = first
--- -- end
--- -- first = crs
--- -- done = true
--- -- else
--- local cgf = graphemes[first]
--- if cgf and cgf[second] then
--- for s in utfcharacters(str) do
--- if n == 1 then
--- break
--- else
--- t = t + 1
--- tokens[t] = s
--- n = n - 1
--- end
--- end
--- first = cgf[second]
--- done = true
--- else
--- first = second
--- n = n + 1
--- end
--- -- end
--- end
--- end
--- if done then
--- if first then
--- t = t + 1
--- tokens[t] = first
--- end
--- return concat(tokens) -- seldom called
--- end
--- elseif nstr > 0 then
--- return high[str] or str -- this will go from here
--- end
--- end
--- return str
--- end
-
--- function utffilters.decompose(str)
--- if str and str ~= "" then
--- local nstr = #str
--- if nstr > 1 then
--- -- if initialize then -- saves a call
--- -- initialize()
--- -- end
--- local tokens, t, done, n = { }, 0, false, 0
--- for s in utfcharacters(str) do
--- local dec = decomposed[s]
--- if dec then
--- if not done then
--- if n > 0 then
--- for s in utfcharacters(str) do
--- if n == 0 then
--- break
--- else
--- t = t + 1
--- tokens[t] = s
--- n = n - 1
--- end
--- end
--- end
--- done = true
--- end
--- t = t + 1
--- tokens[t] = dec
--- elseif done then
--- t = t + 1
--- tokens[t] = s
--- else
--- n = n + 1
--- end
--- end
--- if done then
--- return concat(tokens) -- seldom called
--- end
--- end
--- end
--- return str
--- end
-
--- local replacer = nil
--- local finder = nil
---
--- function utffilters.decompose(str) -- 3 to 4 times faster than the above
--- if not replacer then
--- if initialize then
--- initialize()
--- end
--- local tree = utfchartabletopattern(decomposed)
--- finder = lpeg.finder(tree,false,true)
--- replacer = lpeg.replacer(tree,decomposed,false,true)
--- end
--- if str and str ~= "" and #str > 1 and lpegmatch(finder,str) then
--- return lpegmatch(replacer,str)
--- end
--- return str
--- end
diff --git a/tex/context/base/mkiv/cont-new.mkiv b/tex/context/base/mkiv/cont-new.mkiv
index a80d41e5e..a59640f48 100644
--- a/tex/context/base/mkiv/cont-new.mkiv
+++ b/tex/context/base/mkiv/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2017.05.25 12:50}
+\newcontextversion{2017.05.27 18:52}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/mkiv/context.mkiv b/tex/context/base/mkiv/context.mkiv
index e7037678d..a1160475a 100644
--- a/tex/context/base/mkiv/context.mkiv
+++ b/tex/context/base/mkiv/context.mkiv
@@ -39,7 +39,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2017.05.25 12:50}
+\edef\contextversion{2017.05.27 18:52}
\edef\contextkind {beta}
%D For those who want to use this:
diff --git a/tex/context/base/mkiv/font-afk.lua b/tex/context/base/mkiv/font-afk.lua
index 8b65b0631..761016d34 100644
--- a/tex/context/base/mkiv/font-afk.lua
+++ b/tex/context/base/mkiv/font-afk.lua
@@ -1,6 +1,6 @@
if not modules then modules = { } end modules ['font-afk'] = {
version = 1.001,
- comment = "companion to font-afm.lua",
+ comment = "companion to font-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files",
diff --git a/tex/context/base/mkiv/font-afm.lua b/tex/context/base/mkiv/font-afm.lua
deleted file mode 100644
index a9fbe89f1..000000000
--- a/tex/context/base/mkiv/font-afm.lua
+++ /dev/null
@@ -1,1232 +0,0 @@
-if not modules then modules = { } end modules ['font-afm'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>Some code may look a bit obscure but this has to do with the
-fact that we also use this code for testing and much code evolved
-in the transition from <l n='tfm'/> to <l n='afm'/> to <l
-n='otf'/>.</p>
-
-<p>The following code still has traces of intermediate font support
-where we handles font encodings. Eventually font encoding goes
-away.</p>
-
-<p>The embedding of a font involves creating temporary files and
-depending on your system setup that can fail. It took more than a
-day to figure out why sometimes embedding failed in mingw luatex
-where running on a real path like c:\... failed while running on
-say e:\... being a link worked well. The native windows binaries
-don't have this issue.</p>
-
---ldx]]--
-
-local fonts, logs, trackers, containers, resolvers = fonts, logs, trackers, containers, resolvers
-
-local next, type, tonumber = next, type, tonumber
-local match, gmatch, lower, gsub, strip, find = string.match, string.gmatch, string.lower, string.gsub, string.strip, string.find
-local char, byte, sub = string.char, string.byte, string.sub
-local abs = math.abs
-local bxor, rshift = bit32.bxor, bit32.rshift
-local P, S, R, Cmt, C, Ct, Cs, Cg, Cf, lpegmatch, patterns = lpeg.P, lpeg.S, lpeg.R, lpeg.Cmt, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Cg, lpeg.Cf, lpeg.match, lpeg.patterns
-local derivetable = table.derive
-
-local trace_features = false trackers.register("afm.features", function(v) trace_features = v end)
-local trace_indexing = false trackers.register("afm.indexing", function(v) trace_indexing = v end)
-local trace_loading = false trackers.register("afm.loading", function(v) trace_loading = v end)
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-
-local report_afm = logs.reporter("fonts","afm loading")
-local report_pfb = logs.reporter("fonts","pfb loading")
-
-local setmetatableindex = table.setmetatableindex
-
-local findbinfile = resolvers.findbinfile
-
-local definers = fonts.definers
-local readers = fonts.readers
-local constructors = fonts.constructors
-
-local afm = constructors.handlers.afm
-local pfb = constructors.handlers.pfb
-
-local afmfeatures = constructors.features.afm
-local registerafmfeature = afmfeatures.register
-
-afm.version = 1.501 -- incrementing this number one up will force a re-cache
-afm.cache = containers.define("fonts", "afm", afm.version, true)
-afm.autoprefixed = true -- this will become false some day (catches texnansi-blabla.*)
-
-afm.helpdata = { } -- set later on so no local for this
-afm.syncspace = true -- when true, nicer stretch values
-afm.addligatures = true -- best leave this set to true
-afm.addtexligatures = true -- best leave this set to true
-afm.addkerns = true -- best leave this set to true
-
-local overloads = fonts.mappings.overloads
-
-local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes
-
-local function setmode(tfmdata,value)
- if value then
- tfmdata.properties.mode = lower(value)
- end
-end
-
-registerafmfeature {
- name = "mode",
- description = "mode",
- initializers = {
- base = setmode,
- node = setmode,
- }
-}
-
---[[ldx--
-<p>We start with the basic reader which we give a name similar to the
-built in <l n='tfm'/> and <l n='otf'/> reader.</p>
---ldx]]--
-
---~ Comment FONTIDENTIFIER LMMATHSYMBOLS10
---~ Comment CODINGSCHEME TEX MATH SYMBOLS
---~ Comment DESIGNSIZE 10.0 pt
---~ Comment CHECKSUM O 4261307036
---~ Comment SPACE 0 plus 0 minus 0
---~ Comment QUAD 1000
---~ Comment EXTRASPACE 0
---~ Comment NUM 676.508 393.732 443.731
---~ Comment DENOM 685.951 344.841
---~ Comment SUP 412.892 362.892 288.889
---~ Comment SUB 150 247.217
---~ Comment SUPDROP 386.108
---~ Comment SUBDROP 50
---~ Comment DELIM 2390 1010
---~ Comment AXISHEIGHT 250
-
-local comment = P("Comment")
-local spacing = patterns.spacer -- S(" \t")^1
-local lineend = patterns.newline -- S("\n\r")
-local words = C((1 - lineend)^1)
-local number = C((R("09") + S("."))^1) / tonumber * spacing^0
-local data = lpeg.Carg(1)
-
-local pattern = ( -- needs testing ... not used anyway as we no longer need math afm's
- comment * spacing *
- (
- data * (
- ("CODINGSCHEME" * spacing * words ) / function(fd,a) end +
- ("DESIGNSIZE" * spacing * number * words ) / function(fd,a) fd[ 1] = a end +
- ("CHECKSUM" * spacing * number * words ) / function(fd,a) fd[ 2] = a end +
- ("SPACE" * spacing * number * "plus" * number * "minus" * number) / function(fd,a,b,c) fd[ 3], fd[ 4], fd[ 5] = a, b, c end +
- ("QUAD" * spacing * number ) / function(fd,a) fd[ 6] = a end +
- ("EXTRASPACE" * spacing * number ) / function(fd,a) fd[ 7] = a end +
- ("NUM" * spacing * number * number * number ) / function(fd,a,b,c) fd[ 8], fd[ 9], fd[10] = a, b, c end +
- ("DENOM" * spacing * number * number ) / function(fd,a,b ) fd[11], fd[12] = a, b end +
- ("SUP" * spacing * number * number * number ) / function(fd,a,b,c) fd[13], fd[14], fd[15] = a, b, c end +
- ("SUB" * spacing * number * number ) / function(fd,a,b) fd[16], fd[17] = a, b end +
- ("SUPDROP" * spacing * number ) / function(fd,a) fd[18] = a end +
- ("SUBDROP" * spacing * number ) / function(fd,a) fd[19] = a end +
- ("DELIM" * spacing * number * number ) / function(fd,a,b) fd[20], fd[21] = a, b end +
- ("AXISHEIGHT" * spacing * number ) / function(fd,a) fd[22] = a end
- )
- + (1-lineend)^0
- )
- + (1-comment)^1
-)^0
-
-local function scan_comment(str)
- local fd = { }
- lpegmatch(pattern,str,1,fd)
- return fd
-end
-
--- On a rainy day I will rewrite this in lpeg ... or we can use the (slower) fontloader
--- as in now supports afm/pfb loading but it's not too bad to have different methods
--- for testing approaches.
-
-local keys = { }
-
-function keys.FontName (data,line) data.metadata.fontname = strip (line) -- get rid of spaces
- data.metadata.fullname = strip (line) end
-function keys.ItalicAngle (data,line) data.metadata.italicangle = tonumber (line) end
-function keys.IsFixedPitch(data,line) data.metadata.monospaced = toboolean(line,true) end
-function keys.CharWidth (data,line) data.metadata.charwidth = tonumber (line) end
-function keys.XHeight (data,line) data.metadata.xheight = tonumber (line) end
-function keys.Descender (data,line) data.metadata.descender = tonumber (line) end
-function keys.Ascender (data,line) data.metadata.ascender = tonumber (line) end
-function keys.Comment (data,line)
- -- Comment DesignSize 12 (pts)
- -- Comment TFM designsize: 12 (in points)
- line = lower(line)
- local designsize = match(line,"designsize[^%d]*(%d+)")
- if designsize then data.metadata.designsize = tonumber(designsize) end
-end
-
-local function get_charmetrics(data,charmetrics,vector)
- local characters = data.characters
- local chr, ind = { }, 0
- for k, v in gmatch(charmetrics,"([%a]+) +(.-) *;") do
- if k == 'C' then
- v = tonumber(v)
- if v < 0 then
- ind = ind + 1 -- ?
- else
- ind = v
- end
- chr = {
- index = ind
- }
- elseif k == 'WX' then
- chr.width = tonumber(v)
- elseif k == 'N' then
- characters[v] = chr
- elseif k == 'B' then
- local llx, lly, urx, ury = match(v,"^ *(.-) +(.-) +(.-) +(.-)$")
- chr.boundingbox = { tonumber(llx), tonumber(lly), tonumber(urx), tonumber(ury) }
- elseif k == 'L' then
- local plus, becomes = match(v,"^(.-) +(.-)$")
- local ligatures = chr.ligatures
- if ligatures then
- ligatures[plus] = becomes
- else
- chr.ligatures = { [plus] = becomes }
- end
- end
- end
-end
-
-local function get_kernpairs(data,kernpairs)
- local characters = data.characters
- for one, two, value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do
- local chr = characters[one]
- if chr then
- local kerns = chr.kerns
- if kerns then
- kerns[two] = tonumber(value)
- else
- chr.kerns = { [two] = tonumber(value) }
- end
- end
- end
-end
-
-local function get_variables(data,fontmetrics)
- for key, rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do
- local keyhandler = keys[key]
- if keyhandler then
- keyhandler(data,rest)
- end
- end
-end
-
-local get_indexes
-
-do
-
- -- old font loader
-
- local fontloader = fontloader
- local get_indexes_old = false
-
- if fontloader then
-
- local font_to_table = fontloader.to_table
- local open_font = fontloader.open
- local close_font = fontloader.close
-
- get_indexes_old = function(data,pfbname)
- local pfbblob = open_font(pfbname)
- if pfbblob then
- local characters = data.characters
- local pfbdata = font_to_table(pfbblob)
- if pfbdata then
- local glyphs = pfbdata.glyphs
- if glyphs then
- if trace_loading then
- report_afm("getting index data from %a",pfbname)
- end
- for index, glyph in next, glyphs do
- local name = glyph.name
- if name then
- local char = characters[name]
- if char then
- if trace_indexing then
- report_afm("glyph %a has index %a",name,index)
- end
- char.index = index
- end
- end
- end
- elseif trace_loading then
- report_afm("no glyph data in pfb file %a",pfbname)
- end
- elseif trace_loading then
- report_afm("no data in pfb file %a",pfbname)
- end
- close_font(pfbblob)
- elseif trace_loading then
- report_afm("invalid pfb file %a",pfbname)
- end
- end
-
- end
-
- -- new (unfinished) font loader but i see no differences between
- -- old and new (one bad vector with old)
-
- local n, m
-
- local progress = function(str,position,name,size)
- local forward = position + tonumber(size) + 3 + 2
- n = n + 1
- if n >= m then
- return #str, name
- elseif forward < #str then
- return forward, name
- else
- return #str, name
- end
- end
-
- local initialize = function(str,position,size)
- n = 0
- m = size -- % tonumber(size)
- return position + 1
- end
-
- local charstrings = P("/CharStrings")
- local encoding = P("/Encoding")
- local dup = P("dup")
- local put = P("put")
- local array = P("array")
- local name = P("/") * C((R("az")+R("AZ")+R("09")+S("-_."))^1)
- local digits = R("09")^1
- local cardinal = digits / tonumber
- local spaces = P(" ")^1
- local spacing = patterns.whitespace^0
-
- local p_filternames = Ct (
- (1-charstrings)^0 * charstrings * spaces * Cmt(cardinal,initialize)
- * (Cmt(name * spaces * cardinal, progress) + P(1))^1
- )
-
- -- /Encoding 256 array
- -- 0 1 255 {1 index exch /.notdef put} for
- -- dup 0 /Foo put
-
- local p_filterencoding =
- (1-encoding)^0 * encoding * spaces * digits * spaces * array * (1-dup)^0
- * Cf(
- Ct("") * Cg(spacing * dup * spaces * cardinal * spaces * name * spaces * put)^1
- ,rawset)
-
- -- if one of first 4 not 0-9A-F then binary else hex
-
- local decrypt
-
- do
-
- local r, c1, c2, n = 0, 0, 0, 0
-
- local function step(c)
- local cipher = byte(c)
- local plain = bxor(cipher,rshift(r,8))
- r = ((cipher + r) * c1 + c2) % 65536
- return char(plain)
- end
-
- decrypt = function(binary)
- r, c1, c2, n = 55665, 52845, 22719, 4
- binary = gsub(binary,".",step)
- return sub(binary,n+1)
- end
-
- -- local pattern = Cs((P(1) / step)^1)
- --
- -- decrypt = function(binary)
- -- r, c1, c2, n = 55665, 52845, 22719, 4
- -- binary = lpegmatch(pattern,binary)
- -- return sub(binary,n+1)
- -- end
-
- end
-
- local function loadpfbvector(filename)
- -- for the moment limited to encoding only
-
- local data = io.loaddata(resolvers.findfile(filename))
-
- if not data then
- report_pfb("no data in %a",filename)
- return
- end
-
- if not (find(data,"!PS%-AdobeFont%-") or find(data,"%%!FontType1")) then
- report_pfb("no font in %a",filename)
- return
- end
-
- local ascii, binary = match(data,"(.*)eexec%s+......(.*)")
-
- if not binary then
- report_pfb("no binary data in %a",filename)
- return
- end
-
- binary = decrypt(binary,4)
-
- local vector = lpegmatch(p_filternames,binary)
-
- if vector[1] == ".notdef" then
- -- tricky
- vector[0] = table.remove(vector,1)
- end
-
- if not vector then
- report_pfb("no vector in %a",filename)
- return
- end
-
- local encoding = lpegmatch(p_filterencoding,ascii)
-
- return vector, encoding
-
- end
-
- local pfb = handlers.pfb or { }
- handlers.pfb = pfb
- pfb.loadvector = loadpfbvector
-
- get_indexes = function(data,pfbname)
- local vector = loadpfbvector(pfbname)
- if vector then
- local characters = data.characters
- if trace_loading then
- report_afm("getting index data from %a",pfbname)
- end
- for index=1,#vector do
- local name = vector[index]
- local char = characters[name]
- if char then
- if trace_indexing then
- report_afm("glyph %a has index %a",name,index)
- end
- char.index = index
- end
- end
- end
- end
-
- if get_indexes_old then
-
- afm.use_new_indexer = true
- get_indexes_new = get_indexes
-
- get_indexes = function(data,pfbname)
- if afm.use_new_indexer then
- return get_indexes_new(data,pfbname)
- else
- return get_indexes_old(data,pfbname)
- end
- end
-
- end
-
-
-end
-
-local function readafm(filename)
- local ok, afmblob, size = resolvers.loadbinfile(filename) -- has logging
- if ok and afmblob then
- local data = {
- resources = {
- filename = resolvers.unresolve(filename),
- version = afm.version,
- creator = "context mkiv",
- },
- properties = {
- hasitalics = false,
- },
- goodies = {
- },
- metadata = {
- filename = file.removesuffix(file.basename(filename))
- },
- characters = {
- -- a temporary store
- },
- descriptions = {
- -- the final store
- },
- }
- afmblob = gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics", function(charmetrics)
- if trace_loading then
- report_afm("loading char metrics")
- end
- get_charmetrics(data,charmetrics,vector)
- return ""
- end)
- afmblob = gsub(afmblob,"StartKernPairs(.-)EndKernPairs", function(kernpairs)
- if trace_loading then
- report_afm("loading kern pairs")
- end
- get_kernpairs(data,kernpairs)
- return ""
- end)
- afmblob = gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics", function(version,fontmetrics)
- if trace_loading then
- report_afm("loading variables")
- end
- data.afmversion = version
- get_variables(data,fontmetrics)
- data.fontdimens = scan_comment(fontmetrics) -- todo: all lpeg, no time now
- return ""
- end)
- return data
- else
- if trace_loading then
- report_afm("no valid afm file %a",filename)
- end
- return nil
- end
-end
-
---[[ldx--
-<p>We cache files. Caching is taken care of in the loader. We cheat a bit
-by adding ligatures and kern information to the afm derived data. That
-way we can set them faster when defining a font.</p>
---ldx]]--
-
-local addkerns, addligatures, addtexligatures, unify, normalize, fixnames -- we will implement these later
-
-function afm.load(filename)
- -- hm, for some reasons not resolved yet
- filename = resolvers.findfile(filename,'afm') or ""
- if filename ~= "" and not fonts.names.ignoredfile(filename) then
- local name = file.removesuffix(file.basename(filename))
- local data = containers.read(afm.cache,name)
- local attr = lfs.attributes(filename)
- local size, time = attr.size or 0, attr.modification or 0
- --
- local pfbfile = file.replacesuffix(name,"pfb")
- local pfbname = resolvers.findfile(pfbfile,"pfb") or ""
- if pfbname == "" then
- pfbname = resolvers.findfile(file.basename(pfbfile),"pfb") or ""
- end
- local pfbsize, pfbtime = 0, 0
- if pfbname ~= "" then
- local attr = lfs.attributes(pfbname)
- pfbsize = attr.size or 0
- pfbtime = attr.modification or 0
- end
- if not data or data.size ~= size or data.time ~= time or data.pfbsize ~= pfbsize or data.pfbtime ~= pfbtime then
- report_afm("reading %a",filename)
- data = readafm(filename)
- if data then
- if pfbname ~= "" then
- data.resources.filename = resolvers.unresolve(pfbname)
- get_indexes(data,pfbname)
- elseif trace_loading then
- report_afm("no pfb file for %a",filename)
- -- data.resources.filename = "unset" -- better than loading the afm file
- end
- report_afm("unifying %a",filename)
- unify(data,filename)
- if afm.addligatures then
- report_afm("add ligatures")
- addligatures(data)
- end
- if afm.addtexligatures then
- report_afm("add tex ligatures")
- addtexligatures(data)
- end
- if afm.addkerns then
- report_afm("add extra kerns")
- addkerns(data)
- end
- normalize(data)
- fixnames(data)
- report_afm("add tounicode data")
- fonts.mappings.addtounicode(data,filename)
- data.size = size
- data.time = time
- data.pfbsize = pfbsize
- data.pfbtime = pfbtime
- report_afm("saving %a in cache",name)
- data.resources.unicodes = nil -- consistent with otf but here we save not much
- data = containers.write(afm.cache, name, data)
- data = containers.read(afm.cache,name)
- end
- if applyruntimefixes and data then
- applyruntimefixes(filename,data)
- end
- end
- return data
- else
- return nil
- end
-end
-
-local uparser = fonts.mappings.makenameparser()
-
-unify = function(data, filename)
- local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context
- local unicodes = { }
- local names = { }
- local private = constructors.privateoffset
- local descriptions = data.descriptions
- for name, blob in next, data.characters do
- local code = unicodevector[name] -- or characters.name_to_unicode[name]
- if not code then
- code = lpegmatch(uparser,name)
- if not code then
- code = private
- private = private + 1
- report_afm("assigning private slot %U for unknown glyph name %a",code,name)
- end
- end
- local index = blob.index
- unicodes[name] = code
- names[name] = index
- blob.name = name
- descriptions[code] = {
- boundingbox = blob.boundingbox,
- width = blob.width,
- kerns = blob.kerns,
- index = index,
- name = name,
- }
- end
- for unicode, description in next, descriptions do
- local kerns = description.kerns
- if kerns then
- local krn = { }
- for name, kern in next, kerns do
- local unicode = unicodes[name]
- if unicode then
- krn[unicode] = kern
- else
- -- print(unicode,name)
- end
- end
- description.kerns = krn
- end
- end
- data.characters = nil
- local resources = data.resources
- local filename = resources.filename or file.removesuffix(file.basename(filename))
- resources.filename = resolvers.unresolve(filename) -- no shortcut
- resources.unicodes = unicodes -- name to unicode
- resources.marks = { } -- todo
- -- resources.names = names -- name to index
- resources.private = private
-end
-
-local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } }
-local noflags = { false, false, false, false }
-
-afm.experimental_normalize = false
-
-normalize = function(data)
- if type(afm.experimental_normalize) == "function" then
- afm.experimental_normalize(data)
- end
-end
-
-fixnames = function(data)
- for k, v in next, data.descriptions do
- local n = v.name
- local r = overloads[n]
- if r then
- local name = r.name
- if trace_indexing then
- report_afm("renaming characters %a to %a",n,name)
- end
- v.name = name
- v.unicode = r.unicode
- end
- end
-end
-
---[[ldx--
-<p>These helpers extend the basic table with extra ligatures, texligatures
-and extra kerns. This saves quite some lookups later.</p>
---ldx]]--
-
-local addthem = function(rawdata,ligatures)
- if ligatures then
- local descriptions = rawdata.descriptions
- local resources = rawdata.resources
- local unicodes = resources.unicodes
- -- local names = resources.names
- for ligname, ligdata in next, ligatures do
- local one = descriptions[unicodes[ligname]]
- if one then
- for _, pair in next, ligdata do
- local two, three = unicodes[pair[1]], unicodes[pair[2]]
- if two and three then
- local ol = one.ligatures
- if ol then
- if not ol[two] then
- ol[two] = three
- end
- else
- one.ligatures = { [two] = three }
- end
- end
- end
- end
- end
- end
-end
-
-addligatures = function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end
-addtexligatures = function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end
-
---[[ldx--
-<p>We keep the extra kerns in separate kerning tables so that we can use
-them selectively.</p>
---ldx]]--
-
--- This is rather old code (from the beginning when we had only tfm). If
--- we unify the afm data (now we have names all over the place) then
--- we can use shcodes but there will be many more looping then. But we
--- could get rid of the tables in char-cmp then. Als, in the generic version
--- we don't use the character database. (Ok, we can have a context specific
--- variant).
-
-addkerns = function(rawdata) -- using shcodes is not robust here
- local descriptions = rawdata.descriptions
- local resources = rawdata.resources
- local unicodes = resources.unicodes
- local function do_it_left(what)
- if what then
- for unicode, description in next, descriptions do
- local kerns = description.kerns
- if kerns then
- local extrakerns
- for complex, simple in next, what do
- complex = unicodes[complex]
- simple = unicodes[simple]
- if complex and simple then
- local ks = kerns[simple]
- if ks and not kerns[complex] then
- if extrakerns then
- extrakerns[complex] = ks
- else
- extrakerns = { [complex] = ks }
- end
- end
- end
- end
- if extrakerns then
- description.extrakerns = extrakerns
- end
- end
- end
- end
- end
- local function do_it_copy(what)
- if what then
- for complex, simple in next, what do
- complex = unicodes[complex]
- simple = unicodes[simple]
- if complex and simple then
- local complexdescription = descriptions[complex]
- if complexdescription then -- optional
- local simpledescription = descriptions[complex]
- if simpledescription then
- local extrakerns
- local kerns = simpledescription.kerns
- if kerns then
- for unicode, kern in next, kerns do
- if extrakerns then
- extrakerns[unicode] = kern
- else
- extrakerns = { [unicode] = kern }
- end
- end
- end
- local extrakerns = simpledescription.extrakerns
- if extrakerns then
- for unicode, kern in next, extrakerns do
- if extrakerns then
- extrakerns[unicode] = kern
- else
- extrakerns = { [unicode] = kern }
- end
- end
- end
- if extrakerns then
- complexdescription.extrakerns = extrakerns
- end
- end
- end
- end
- end
- end
- end
- -- add complex with values of simplified when present
- do_it_left(afm.helpdata.leftkerned)
- do_it_left(afm.helpdata.bothkerned)
- -- copy kerns from simple char to complex char unless set
- do_it_copy(afm.helpdata.bothkerned)
- do_it_copy(afm.helpdata.rightkerned)
-end
-
---[[ldx--
-<p>The copying routine looks messy (and is indeed a bit messy).</p>
---ldx]]--
-
-local function adddimensions(data) -- we need to normalize afm to otf i.e. indexed table instead of name
- if data then
- for unicode, description in next, data.descriptions do
- local bb = description.boundingbox
- if bb then
- local ht, dp = bb[4], -bb[2]
- if ht == 0 or ht < 0 then
- -- no need to set it and no negative heights, nil == 0
- else
- description.height = ht
- end
- if dp == 0 or dp < 0 then
- -- no negative depths and no negative depths, nil == 0
- else
- description.depth = dp
- end
- end
- end
- end
-end
-
-local function copytotfm(data)
- if data and data.descriptions then
- local metadata = data.metadata
- local resources = data.resources
- local properties = derivetable(data.properties)
- local descriptions = derivetable(data.descriptions)
- local goodies = derivetable(data.goodies)
- local characters = { }
- local parameters = { }
- local unicodes = resources.unicodes
- --
- for unicode, description in next, data.descriptions do -- use parent table
- characters[unicode] = { }
- end
- --
- local filename = constructors.checkedfilename(resources)
- local fontname = metadata.fontname or metadata.fullname
- local fullname = metadata.fullname or metadata.fontname
- local endash = 0x0020 -- space
- local emdash = 0x2014
- local spacer = "space"
- local spaceunits = 500
- --
- local monospaced = metadata.monospaced
- local charwidth = metadata.charwidth
- local italicangle = metadata.italicangle
- local charxheight = metadata.xheight and metadata.xheight > 0 and metadata.xheight
- properties.monospaced = monospaced
- parameters.italicangle = italicangle
- parameters.charwidth = charwidth
- parameters.charxheight = charxheight
- -- same as otf
- if properties.monospaced then
- if descriptions[endash] then
- spaceunits, spacer = descriptions[endash].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width, "emdash"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- else
- if descriptions[endash] then
- spaceunits, spacer = descriptions[endash].width, "space"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- end
- spaceunits = tonumber(spaceunits)
- if spaceunits < 200 then
- -- todo: warning
- end
- --
- parameters.slant = 0
- parameters.space = spaceunits
- parameters.space_stretch = 500
- parameters.space_shrink = 333
- parameters.x_height = 400
- parameters.quad = 1000
- --
- if italicangle and italicangle ~= 0 then
- parameters.italicangle = italicangle
- parameters.italicfactor = math.cos(math.rad(90+italicangle))
- parameters.slant = - math.tan(italicangle*math.pi/180)
- end
- if monospaced then
- parameters.space_stretch = 0
- parameters.space_shrink = 0
- elseif afm.syncspace then
- parameters.space_stretch = spaceunits/2
- parameters.space_shrink = spaceunits/3
- end
- parameters.extra_space = parameters.space_shrink
- if charxheight then
- parameters.x_height = charxheight
- else
- -- same as otf
- local x = 0x0078 -- x
- if x then
- local x = descriptions[x]
- if x then
- parameters.x_height = x.height
- end
- end
- --
- end
- local fd = data.fontdimens
- if fd and fd[8] and fd[9] and fd[10] then -- math
- for k,v in next, fd do
- parameters[k] = v
- end
- end
- --
- parameters.designsize = (metadata.designsize or 10)*65536
- parameters.ascender = abs(metadata.ascender or 0)
- parameters.descender = abs(metadata.descender or 0)
- parameters.units = 1000
- --
- properties.spacer = spacer
- properties.encodingbytes = 2
- properties.format = fonts.formats[filename] or "type1"
- properties.filename = filename
- properties.fontname = fontname
- properties.fullname = fullname
- properties.psname = fullname
- properties.name = filename or fullname or fontname
- --
- if next(characters) then
- return {
- characters = characters,
- descriptions = descriptions,
- parameters = parameters,
- resources = resources,
- properties = properties,
- goodies = goodies,
- }
- end
- end
- return nil
-end
-
---[[ldx--
-<p>Originally we had features kind of hard coded for <l n='afm'/>
-files but since I expect to support more font formats, I decided
-to treat this fontformat like any other and handle features in a
-more configurable way.</p>
---ldx]]--
-
-function afm.setfeatures(tfmdata,features)
- local okay = constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm)
- if okay then
- return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm)
- else
- return { } -- will become false
- end
-end
-
-local function addtables(data)
- local resources = data.resources
- local lookuptags = resources.lookuptags
- local unicodes = resources.unicodes
- if not lookuptags then
- lookuptags = { }
- resources.lookuptags = lookuptags
- end
- setmetatableindex(lookuptags,function(t,k)
- local v = type(k) == "number" and ("lookup " .. k) or k
- t[k] = v
- return v
- end)
- if not unicodes then
- unicodes = { }
- resources.unicodes = unicodes
- setmetatableindex(unicodes,function(t,k)
- setmetatableindex(unicodes,nil)
- for u, d in next, data.descriptions do
- local n = d.name
- if n then
- t[n] = u
- end
- end
- return rawget(t,k)
- end)
- end
- constructors.addcoreunicodes(unicodes) -- do we really need this?
-end
-
-local function afmtotfm(specification)
- local afmname = specification.filename or specification.name
- if specification.forced == "afm" or specification.format == "afm" then -- move this one up
- if trace_loading then
- report_afm("forcing afm format for %a",afmname)
- end
- else
- local tfmname = findbinfile(afmname,"ofm") or ""
- if tfmname ~= "" then
- if trace_loading then
- report_afm("fallback from afm to tfm for %a",afmname)
- end
- return -- just that
- end
- end
- if afmname ~= "" then
- -- weird, isn't this already done then?
- local features = constructors.checkedfeatures("afm",specification.features.normal)
- specification.features.normal = features
- constructors.hashinstance(specification,true) -- also weird here
- --
- specification = definers.resolve(specification) -- new, was forgotten
- local cache_id = specification.hash
- local tfmdata = containers.read(constructors.cache, cache_id) -- cache with features applied
- if not tfmdata then
- local rawdata = afm.load(afmname)
- if rawdata and next(rawdata) then
- addtables(rawdata)
- adddimensions(rawdata)
- tfmdata = copytotfm(rawdata)
- if tfmdata and next(tfmdata) then
- local shared = tfmdata.shared
- if not shared then
- shared = { }
- tfmdata.shared = shared
- end
- shared.rawdata = rawdata
- shared.features = features
- shared.processes = afm.setfeatures(tfmdata,features)
- end
- elseif trace_loading then
- report_afm("no (valid) afm file found with name %a",afmname)
- end
- tfmdata = containers.write(constructors.cache,cache_id,tfmdata)
- end
- return tfmdata
- end
-end
-
---[[ldx--
-<p>As soon as we could intercept the <l n='tfm'/> reader, I implemented an
-<l n='afm'/> reader. Since traditional <l n='pdftex'/> could use <l n='opentype'/>
-fonts with <l n='afm'/> companions, the following method also could handle
-those cases, but now that we can handle <l n='opentype'/> directly we no longer
-need this features.</p>
---ldx]]--
-
-local function read_from_afm(specification)
- local tfmdata = afmtotfm(specification)
- if tfmdata then
- tfmdata.properties.name = specification.name
- tfmdata = constructors.scale(tfmdata, specification)
- local allfeatures = tfmdata.shared.features or specification.features.normal
- constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm)
- fonts.loggers.register(tfmdata,'afm',specification)
- end
- return tfmdata
-end
-
---[[ldx--
-<p>Here comes the implementation of a few features. We only implement
-those that make sense for this format.</p>
---ldx]]--
-
-local function prepareligatures(tfmdata,ligatures,value)
- if value then
- local descriptions = tfmdata.descriptions
- local hasligatures = false
- for unicode, character in next, tfmdata.characters do
- local description = descriptions[unicode]
- local dligatures = description.ligatures
- if dligatures then
- local cligatures = character.ligatures
- if not cligatures then
- cligatures = { }
- character.ligatures = cligatures
- end
- for unicode, ligature in next, dligatures do
- cligatures[unicode] = {
- char = ligature,
- type = 0
- }
- end
- hasligatures = true
- end
- end
- tfmdata.properties.hasligatures = hasligatures
- end
-end
-
-local function preparekerns(tfmdata,kerns,value)
- if value then
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local unicodes = resources.unicodes
- local descriptions = tfmdata.descriptions
- local haskerns = false
- for u, chr in next, tfmdata.characters do
- local d = descriptions[u]
- local newkerns = d[kerns]
- if newkerns then
- local kerns = chr.kerns
- if not kerns then
- kerns = { }
- chr.kerns = kerns
- end
- for k,v in next, newkerns do
- local uk = unicodes[k]
- if uk then
- kerns[uk] = v
- end
- end
- haskerns = true
- end
- end
- tfmdata.properties.haskerns = haskerns
- end
-end
-
-local list = {
- -- [0x0022] = 0x201D,
- [0x0027] = 0x2019,
- -- [0x0060] = 0x2018,
-}
-
-local function texreplacements(tfmdata,value)
- local descriptions = tfmdata.descriptions
- local characters = tfmdata.characters
- for k, v in next, list do
- characters [k] = characters [v] -- we forget about kerns
- descriptions[k] = descriptions[v] -- we forget about kerns
- end
-end
-
-local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures', value) end
-local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end
-local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns', value) end
-local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns', value) end
-
-registerafmfeature {
- name = "liga",
- description = "traditional ligatures",
- initializers = {
- base = ligatures,
- node = ligatures,
- }
-}
-
-registerafmfeature {
- name = "kern",
- description = "intercharacter kerning",
- initializers = {
- base = kerns,
- node = kerns,
- }
-}
-
-registerafmfeature {
- name = "extrakerns",
- description = "additional intercharacter kerning",
- initializers = {
- base = extrakerns,
- node = extrakerns,
- }
-}
-
-registerafmfeature {
- name = 'tlig',
- description = 'tex ligatures',
- initializers = {
- base = texligatures,
- node = texligatures,
- }
-}
-
-registerafmfeature {
- name = 'trep',
- description = 'tex replacements',
- initializers = {
- base = texreplacements,
- node = texreplacements,
- }
-}
-
--- readers
-
-fonts.formats.afm = "type1"
-fonts.formats.pfb = "type1"
-
-local function check_afm(specification,fullname)
- local foundname = findbinfile(fullname, 'afm') or "" -- just to be sure
- if foundname == "" then
- foundname = fonts.names.getfilename(fullname,"afm") or ""
- end
- if foundname == "" and afm.autoprefixed then
- local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.*
- if encoding and shortname and fonts.encodings.known[encoding] then
- shortname = findbinfile(shortname,'afm') or "" -- just to be sure
- if shortname ~= "" then
- foundname = shortname
- if trace_defining then
- report_afm("stripping encoding prefix from filename %a",afmname)
- end
- end
- end
- end
- if foundname ~= "" then
- specification.filename = foundname
- specification.format = "afm"
- return read_from_afm(specification)
- end
-end
-
-function readers.afm(specification,method)
- local fullname, tfmdata = specification.filename or "", nil
- if fullname == "" then
- local forced = specification.forced or ""
- if forced ~= "" then
- tfmdata = check_afm(specification,specification.name .. "." .. forced)
- end
- if not tfmdata then
- local check_tfm = readers.check_tfm
- method = (check_tfm and (method or definers.method or "afm or tfm")) or "afm"
- if method == "tfm" then
- tfmdata = check_tfm(specification,specification.name)
- elseif method == "afm" then
- tfmdata = check_afm(specification,specification.name)
- elseif method == "tfm or afm" then
- tfmdata = check_tfm(specification,specification.name) or check_afm(specification,specification.name)
- else -- method == "afm or tfm" or method == "" then
- tfmdata = check_afm(specification,specification.name) or check_tfm(specification,specification.name)
- end
- end
- else
- tfmdata = check_afm(specification,fullname)
- end
- return tfmdata
-end
-
-function readers.pfb(specification,method) -- only called when forced
- local original = specification.specification
- if trace_defining then
- report_afm("using afm reader for %a",original)
- end
- specification.specification = gsub(original,"%.pfb",".afm")
- specification.forced = "afm"
- return readers.afm(specification,method)
-end
diff --git a/tex/context/base/mkiv/font-cft.lua b/tex/context/base/mkiv/font-cft.lua
index 63c056022..fcc5d7b5a 100644
--- a/tex/context/base/mkiv/font-cft.lua
+++ b/tex/context/base/mkiv/font-cft.lua
@@ -7,16 +7,18 @@ if not modules then modules = { } end modules ['font-cft'] = {
}
-- context font tables
-
+--
-- todo: extra:
--
-- extra_space => space.extra
-- space => space.width
-- space_stretch => space.stretch
-- space_shrink => space.shrink
-
+--
-- We do keep the x-height, extra_space, space_shrink and space_stretch
-- around as these are low level official names.
+--
+-- Needs to be checked and updated.
local type = type
diff --git a/tex/context/base/mkiv/font-cid.lua b/tex/context/base/mkiv/font-cid.lua
index 0eaacdfbd..781bc9fec 100644
--- a/tex/context/base/mkiv/font-cid.lua
+++ b/tex/context/base/mkiv/font-cid.lua
@@ -1,6 +1,6 @@
if not modules then modules = { } end modules ['font-cid'] = {
version = 1.001,
- comment = "companion to font-otf.lua (cidmaps)",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
diff --git a/tex/context/base/mkiv/font-col.lua b/tex/context/base/mkiv/font-col.lua
index bce16fae7..8c96202ec 100644
--- a/tex/context/base/mkiv/font-col.lua
+++ b/tex/context/base/mkiv/font-col.lua
@@ -49,6 +49,7 @@ collections.vectors = vectors
local fontdata = fonts.hashes.identifiers
local chardata = fonts.hashes.characters
+local propdata = fonts.hashes.properties
local currentfont = font.current
local fontpatternhassize = fonts.helpers.fontpatternhassize
@@ -220,13 +221,15 @@ function collections.prepare(name) -- we can do this in lua now .. todo
if vectors[current] then
return
end
- if fontdata[current].mathparameters then
+ local properties = propdata[current]
+ local mathsize = properties.mathsize
+ if mathsize == 1 or mathsize == 2 or mathsize == 3 then
return
end
local d = definitions[name]
if d then
if trace_collecting then
- local filename = file.basename(fontdata[current].properties.filename or "?")
+ local filename = file.basename(properties.filename or "?")
report_fonts("applying collection %a to %a, file %a",name,current,filename)
end
list = { }
@@ -246,9 +249,6 @@ function collections.prepare(name) -- we can do this in lua now .. todo
context.font_fallbacks_prepare_clone_vectors(name)
context.font_fallbacks_stop_cloning()
context.popcatcodes() -- context.protect()
- elseif trace_collecting then
- local filename = file.basename(fontdata[current].properties.filename or "?")
- report_fonts("error while applying collection %a to %a, file %a",name,current,filename)
end
end
diff --git a/tex/context/base/mkiv/font-con.lua b/tex/context/base/mkiv/font-con.lua
index 85ac33a10..e328d56d2 100644
--- a/tex/context/base/mkiv/font-con.lua
+++ b/tex/context/base/mkiv/font-con.lua
@@ -322,7 +322,7 @@ function constructors.scale(tfmdata,specification)
--
local mathsize = tonumber(specification.mathsize) or 0
local textsize = tonumber(specification.textsize) or scaledpoints
- local forcedsize = tonumber(parameters.mathsize ) or 0
+ local forcedsize = tonumber(parameters.mathsize ) or 0 -- can be set by the feature "mathsize"
local extrafactor = tonumber(specification.factor ) or 1
if (mathsize == 2 or forcedsize == 2) and parameters.scriptpercentage then
scaledpoints = parameters.scriptpercentage * textsize / 100
@@ -330,6 +330,8 @@ function constructors.scale(tfmdata,specification)
scaledpoints = parameters.scriptscriptpercentage * textsize / 100
elseif forcedsize > 1000 then -- safeguard
scaledpoints = forcedsize
+ else
+ -- in context x and xx also use mathsize
end
targetparameters.mathsize = mathsize -- context specific
targetparameters.textsize = textsize -- context specific
diff --git a/tex/context/base/mkiv/font-ctx.lua b/tex/context/base/mkiv/font-ctx.lua
index 578babc75..d375d4679 100644
--- a/tex/context/base/mkiv/font-ctx.lua
+++ b/tex/context/base/mkiv/font-ctx.lua
@@ -1299,6 +1299,8 @@ do -- else too many locals
-- stoptiming(fonts)
-- end
+ local busy = false
+
scanners.definefont_two = function()
local global = scanboolean() -- \ifx\fontclass\empty\s!false\else\s!true\fi
@@ -1420,6 +1422,7 @@ do -- else too many locals
-- setting the extra characters will move elsewhere
local characters = tfmdata.characters
local parameters = tfmdata.parameters
+ local properties = tfmdata.properties
-- we use char0 as signal; cf the spec pdf can handle this (no char in slot)
characters[0] = nil
-- characters[0x00A0] = { width = parameters.space }
@@ -1428,25 +1431,32 @@ do -- else too many locals
--
constructors.checkvirtualids(tfmdata) -- experiment, will become obsolete when slots can selfreference
local fallbacks = specification.fallbacks
- if fallbacks and fallbacks ~= "" and tfmdata.properties.hasmath then
+ local mathsize = (mathsize == 1 or mathsize == 2 or mathsize == 3) and mathsize or nil -- can be unset so we test 1 2 3
+ if fallbacks and fallbacks ~= "" and mathsize and not busy then
+ busy = true
-- We need this ugly hack in order to resolve fontnames (at the \TEX end). Originally
-- math was done in Lua after loading (plugged into aftercopying).
--
- -- After tl 2017 I'll also do text falbacks this way (although backups there are done
+ -- After tl 2017 I'll also do text fallbacks this way (although backups there are done
-- in a completely different way.
+ if trace_defining then
+ report_defining("defining %a, id %a, target %a, features %a / %a, fallbacks %a / %a, step %a",
+ name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks,1)
+ end
mathematics.resolvefallbacks(tfmdata,specification,fallbacks)
context(function()
+ busy = false
mathematics.finishfallbacks(tfmdata,specification,fallbacks)
local id = definefont(tfmdata)
csnames[id] = specification.cs
- tfmdata.properties.id = id
+ properties.id = id
definers.register(tfmdata,id) -- to be sure, normally already done
texdefinefont(global,cs,id)
constructors.cleanuptable(tfmdata)
constructors.finalize(tfmdata)
if trace_defining then
- report_defining("defining %a, id %a, target %a, features %a / %a, fallbacks %a / %a",
- name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks)
+ report_defining("defining %a, id %a, target %a, features %a / %a, fallbacks %a / %a, step %a",
+ name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks,2)
end
-- resolved (when designsize is used):
local size = tfmdata.parameters.size or 655360
@@ -1455,13 +1465,20 @@ do -- else too many locals
texsetcount("scaledfontsize",size)
lastfontid = id
--
+ if trace_defining then
+ report_defining("memory usage after: %s",statistics.memused())
+ report_defining("stop stage two")
+ end
+ --
texsetcount("global","lastfontid",lastfontid)
specifiers[lastfontid] = { str, size }
if not mathsize then
- -- forget about it
+ -- forget about it (can't happen here)
elseif mathsize == 0 then
+ -- can't happen (here)
lastmathids[1] = lastfontid
else
+ -- maybe only 1 2 3 (we already test for this)
lastmathids[mathsize] = lastfontid
end
stoptiming(fonts)
@@ -1470,14 +1487,14 @@ do -- else too many locals
else
local id = definefont(tfmdata)
csnames[id] = specification.cs
- tfmdata.properties.id = id
+ properties.id = id
definers.register(tfmdata,id) -- to be sure, normally already done
texdefinefont(global,cs,id)
constructors.cleanuptable(tfmdata)
constructors.finalize(tfmdata)
if trace_defining then
- report_defining("defining %a, id %a, target %a, features %a / %a, fallbacks %a / %a",
- name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks)
+ report_defining("defining %a, id %a, target %a, features %a / %a, fallbacks %a / %a, step %a",
+ name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks,"-")
end
-- resolved (when designsize is used):
local size = tfmdata.parameters.size or 655360
@@ -1516,7 +1533,7 @@ do -- else too many locals
-- forget about it
elseif mathsize == 0 then
lastmathids[1] = lastfontid
- else
+ else -- maybe only 1 2 3
lastmathids[mathsize] = lastfontid
end
--
diff --git a/tex/context/base/mkiv/font-inj.lua b/tex/context/base/mkiv/font-inj.lua
deleted file mode 100644
index ccc41d3f3..000000000
--- a/tex/context/base/mkiv/font-inj.lua
+++ /dev/null
@@ -1,1194 +0,0 @@
-if not modules then modules = { } end modules ['font-inj'] = {
- version = 1.001,
- comment = "companion to font-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- This property based variant is not faster but looks nicer than the attribute one. We
--- need to use rawget (which is about 4 times slower than a direct access) but we cannot
--- get/set that one for our purpose! This version does a bit more with discretionaries
--- (and Kai has tested it with his collection of weird fonts.)
-
--- There is some duplicate code here (especially in the pre/post/replace branches) but
--- we go for speed. We could store a list of glyph and mark nodes when registering but it's
--- cleaner to have an identification pass here. Also, I need to keep tracing in mind so
--- being too clever here is dangerous.
-
--- The subtype test is not needed as there will be no (new) properties set, given that we
--- reset the properties.
-
-if not nodes.properties then return end
-
-local next, rawget = next, rawget
-local fastcopy = table.fastcopy
-
-local trace_injections = false trackers.register("fonts.injections", function(v) trace_injections = v end)
-
-local report_injections = logs.reporter("fonts","injections")
-
-local attributes, nodes, node = attributes, nodes, node
-
-fonts = fonts
-local fontdata = fonts.hashes.identifiers
-
-nodes.injections = nodes.injections or { }
-local injections = nodes.injections
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local kern_code = nodecodes.kern
-
-local nuts = nodes.nuts
-local nodepool = nuts.pool
-
-local newkern = nodepool.kern
-
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-
-local traverse_id = nuts.traverse_id
-local insert_node_before = nuts.insert_before
-local insert_node_after = nuts.insert_after
-local find_tail = nuts.tail
-
-local properties = nodes.properties.data
-
-function injections.installnewkern(nk)
- newkern = nk or newkern
-end
-
-local nofregisteredkerns = 0
-local nofregisteredpairs = 0
-local nofregisteredmarks = 0
-local nofregisteredcursives = 0
-local keepregisteredcounts = false
-
-function injections.keepcounts()
- keepregisteredcounts = true
-end
-
-function injections.resetcounts()
- nofregisteredkerns = 0
- nofregisteredpairs = 0
- nofregisteredmarks = 0
- nofregisteredcursives = 0
- keepregisteredcounts = false
-end
-
--- We need to make sure that a possible metatable will not kick in unexpectedly.
-
--- function injections.reset(n)
--- local p = rawget(properties,n)
--- if p and rawget(p,"injections") then
--- p.injections = nil
--- end
--- end
-
--- function injections.copy(target,source)
--- local sp = rawget(properties,source)
--- if sp then
--- local tp = rawget(properties,target)
--- local si = rawget(sp,"injections")
--- if si then
--- si = fastcopy(si)
--- if tp then
--- tp.injections = si
--- else
--- propertydata[target] = {
--- injections = si,
--- }
--- end
--- else
--- if tp then
--- tp.injections = nil
--- end
--- end
--- end
--- end
-
-function injections.reset(n)
- local p = rawget(properties,n)
- if p then
- p.injections = false -- { }
- else
- properties[n] = false -- { injections = { } }
- end
-end
-
-function injections.copy(target,source)
- local sp = rawget(properties,source)
- if sp then
- local tp = rawget(properties,target)
- local si = rawget(sp,"injections")
- if si then
- si = fastcopy(si)
- if tp then
- tp.injections = si
- else
-                properties[target] = {
- injections = si,
- }
- end
- elseif tp then
- tp.injections = false -- { }
- else
- properties[target] = { injections = { } }
- end
- else
- local tp = rawget(properties,target)
- if tp then
- tp.injections = false -- { }
- else
- properties[target] = false -- { injections = { } }
- end
- end
-end
-
-function injections.setligaindex(n,index)
- local p = rawget(properties,n)
- if p then
- local i = rawget(p,"injections")
- if i then
- i.ligaindex = index
- else
- p.injections = {
- ligaindex = index
- }
- end
- else
- properties[n] = {
- injections = {
- ligaindex = index
- }
- }
- end
-end
-
-function injections.getligaindex(n,default)
- local p = rawget(properties,n)
- if p then
- local i = rawget(p,"injections")
- if i then
- return i.ligaindex or default
- end
- end
- return default
-end
-
-function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) -- hm: nuts or nodes
- local dx = factor*(exit[1]-entry[1])
- local dy = -factor*(exit[2]-entry[2])
- local ws = tfmstart.width
- local wn = tfmnext.width
- nofregisteredcursives = nofregisteredcursives + 1
- if rlmode < 0 then
- dx = -(dx + wn)
- else
- dx = dx - ws
- end
- --
- local p = rawget(properties,start)
- if p then
- local i = rawget(p,"injections")
- if i then
- i.cursiveanchor = true
- else
- p.injections = {
- cursiveanchor = true,
- }
- end
- else
- properties[start] = {
- injections = {
- cursiveanchor = true,
- },
- }
- end
- local p = rawget(properties,nxt)
- if p then
- local i = rawget(p,"injections")
- if i then
- i.cursivex = dx
- i.cursivey = dy
- else
- p.injections = {
- cursivex = dx,
- cursivey = dy,
- }
- end
- else
- properties[nxt] = {
- injections = {
- cursivex = dx,
- cursivey = dy,
- },
- }
- end
- return dx, dy, nofregisteredcursives
-end
-
-function injections.setpair(current,factor,rlmode,r2lflag,spec,injection) -- r2lflag & tfmchr not used
- local x = factor*spec[1]
- local y = factor*spec[2]
- local w = factor*spec[3]
- local h = factor*spec[4]
- if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then -- okay?
- local yoffset = y - h
- local leftkern = x -- both kerns are set in a pair kern compared
- local rightkern = w - x -- to normal kerns where we set only leftkern
- if leftkern ~= 0 or rightkern ~= 0 or yoffset ~= 0 then
- nofregisteredpairs = nofregisteredpairs + 1
- if rlmode and rlmode < 0 then
- leftkern, rightkern = rightkern, leftkern
- end
- if not injection then
- injection = "injections"
- end
- local p = rawget(properties,current)
- if p then
- local i = rawget(p,injection)
- if i then
- if leftkern ~= 0 then
- i.leftkern = (i.leftkern or 0) + leftkern
- end
- if rightkern ~= 0 then
- i.rightkern = (i.rightkern or 0) + rightkern
- end
- if yoffset ~= 0 then
- i.yoffset = (i.yoffset or 0) + yoffset
- end
- elseif leftkern ~= 0 or rightkern ~= 0 then
- p[injection] = {
- leftkern = leftkern,
- rightkern = rightkern,
- yoffset = yoffset,
- }
- else
- p[injection] = {
- yoffset = yoffset,
- }
- end
- elseif leftkern ~= 0 or rightkern ~= 0 then
- properties[current] = {
- [injection] = {
- leftkern = leftkern,
- rightkern = rightkern,
- yoffset = yoffset,
- },
- }
- else
- properties[current] = {
- [injection] = {
- yoffset = yoffset,
- },
- }
- end
- return x, y, w, h, nofregisteredpairs
- end
- end
- return x, y, w, h -- no bound
-end
-
--- This needs checking for rl < 0 but it is unlikely that a r2l script uses kernclasses between
--- glyphs so we're probably safe (KE has a problematic font where marks interfere with rl < 0 in
--- the previous case)
-
-function injections.setkern(current,factor,rlmode,x,injection)
- local dx = factor * x
- if dx ~= 0 then
- nofregisteredkerns = nofregisteredkerns + 1
- local p = rawget(properties,current)
- if not injection then
- injection = "injections"
- end
- if p then
- local i = rawget(p,injection)
- if i then
- i.leftkern = dx + (i.leftkern or 0)
- else
- p[injection] = {
- leftkern = dx,
- }
- end
- else
- properties[current] = {
- [injection] = {
- leftkern = dx,
- },
- }
- end
- return dx, nofregisteredkerns
- else
- return 0, 0
- end
-end
-
-function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase,mkmk) -- ba=baseanchor, ma=markanchor
- local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
- nofregisteredmarks = nofregisteredmarks + 1
- -- markanchors[nofregisteredmarks] = base
- if rlmode >= 0 then
- dx = tfmbase.width - dx -- see later commented ox
- end
- local p = rawget(properties,start)
- -- hm, dejavu serif does a sloppy mark2mark before mark2base
- if p then
- local i = rawget(p,"injections")
- if i then
- if i.markmark then
- -- out of order mkmk: yes or no or option
- else
- i.markx = dx
- i.marky = dy
- i.markdir = rlmode or 0
- i.markbase = nofregisteredmarks
- i.markbasenode = base
- i.markmark = mkmk
- end
- else
- p.injections = {
- markx = dx,
- marky = dy,
- markdir = rlmode or 0,
- markbase = nofregisteredmarks,
- markbasenode = base,
- markmark = mkmk,
- }
- end
- else
- properties[start] = {
- injections = {
- markx = dx,
- marky = dy,
- markdir = rlmode or 0,
- markbase = nofregisteredmarks,
- markbasenode = base,
- markmark = mkmk,
- },
- }
- end
- return dx, dy, nofregisteredmarks
-end
-
-local function dir(n)
- return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
-end
-
-local function showchar(n,nested)
- local char = getchar(n)
- report_injections("%wfont %s, char %U, glyph %c",nested and 2 or 0,getfont(n),char,char)
-end
-
-local function show(n,what,nested,symbol)
- if n then
- local p = rawget(properties,n)
- if p then
- local i = rawget(p,what)
- if i then
- local leftkern = i.leftkern or 0
- local rightkern = i.rightkern or 0
- local yoffset = i.yoffset or 0
- local markx = i.markx or 0
- local marky = i.marky or 0
- local markdir = i.markdir or 0
- local markbase = i.markbase or 0
- local cursivex = i.cursivex or 0
- local cursivey = i.cursivey or 0
- local ligaindex = i.ligaindex or 0
- local cursbase = i.cursiveanchor
- local margin = nested and 4 or 2
- --
- if rightkern ~= 0 or yoffset ~= 0 then
- report_injections("%w%s pair: lx %p, rx %p, dy %p",margin,symbol,leftkern,rightkern,yoffset)
- elseif leftkern ~= 0 then
- report_injections("%w%s kern: dx %p",margin,symbol,leftkern)
- end
- if markx ~= 0 or marky ~= 0 or markbase ~= 0 then
- report_injections("%w%s mark: dx %p, dy %p, dir %s, base %s",margin,symbol,markx,marky,markdir,markbase ~= 0 and "yes" or "no")
- end
- if cursivex ~= 0 or cursivey ~= 0 then
- if cursbase then
- report_injections("%w%s curs: base dx %p, dy %p",margin,symbol,cursivex,cursivey)
- else
- report_injections("%w%s curs: dx %p, dy %p",margin,symbol,cursivex,cursivey)
- end
- elseif cursbase then
- report_injections("%w%s curs: base",margin,symbol)
- end
- if ligaindex ~= 0 then
- report_injections("%w%s liga: index %i",margin,symbol,ligaindex)
- end
- end
- end
- end
-end
-
-local function showsub(n,what,where)
- report_injections("begin subrun: %s",where)
- for n in traverse_id(glyph_code,n) do
- showchar(n,where)
- show(n,what,where," ")
- end
- report_injections("end subrun")
-end
-
-local function trace(head,where)
- report_injections("begin run %s: %s kerns, %s pairs, %s marks and %s cursives registered",
- where or "",nofregisteredkerns,nofregisteredpairs,nofregisteredmarks,nofregisteredcursives)
- local n = head
- while n do
- local id = getid(n)
- if id == glyph_code then
- showchar(n)
- show(n,"injections",false," ")
- show(n,"preinjections",false,"<")
- show(n,"postinjections",false,">")
- show(n,"replaceinjections",false,"=")
- elseif id == disc_code then
- local pre = getfield(n,"pre")
- local post = getfield(n,"post")
- local replace = getfield(n,"replace")
- if pre then
- showsub(pre,"preinjections","pre")
- end
- if post then
- showsub(post,"postinjections","post")
- end
- if replace then
- showsub(replace,"replaceinjections","replace")
- end
- end
- n = getnext(n)
- end
- report_injections("end run")
-end
-
-local function show_result(head)
- local current = head
- local skipping = false
- while current do
- local id = getid(current)
- if id == glyph_code then
- report_injections("char: %C, width %p, xoffset %p, yoffset %p",
- getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
- skipping = false
- elseif id == kern_code then
- report_injections("kern: %p",getfield(current,"kern"))
- skipping = false
- elseif not skipping then
- report_injections()
- skipping = true
- end
- current = getnext(current)
- end
-end
-
-local function collect_glyphs(head,offsets)
- local glyphs, glyphi, nofglyphs = { }, { }, 0
- local marks, marki, nofmarks = { }, { }, 0
- local nf, tm = nil, nil
- local n = head
-
- local function identify(n,what)
- local f = getfont(n)
- if f ~= nf then
- nf = f
- -- other hash in ctx:
- tm = fontdata[nf].resources
- if tm then
- tm = tm.marks
- end
- end
- if tm and tm[getchar(n)] then
- nofmarks = nofmarks + 1
- marks[nofmarks] = n
- marki[nofmarks] = "injections"
- else
- nofglyphs = nofglyphs + 1
- glyphs[nofglyphs] = n
- glyphi[nofglyphs] = what
- end
- if offsets then
- -- yoffsets can influence curs steps
- local p = rawget(properties,n)
- if p then
- local i = rawget(p,what)
- if i then
- local yoffset = i.yoffset
- if yoffset and yoffset ~= 0 then
- setfield(n,"yoffset",yoffset)
- end
- end
- end
- end
- end
-
- while n do -- only needed for relevant fonts
- local id = getid(n)
- if id == glyph_code then
- identify(n,"injections")
- elseif id == disc_code then
- local d = getfield(n,"pre")
- if d then
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n) < 256 then
- identify(n,"preinjections")
- end
- end
- end
- local d = getfield(n,"post")
- if d then
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n) < 256 then
- identify(n,"postinjections")
- end
- end
- end
- local d = getfield(n,"replace")
- if d then
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n) < 256 then
- identify(n,"replaceinjections")
- end
- end
- end
- end
- n = getnext(n)
- end
-
- return glyphs, glyphi, nofglyphs, marks, marki, nofmarks
-end
-
-local function inject_marks(marks,marki,nofmarks)
- for i=1,nofmarks do
- local n = marks[i]
- local pn = rawget(properties,n)
- if pn then
- local ni = marki[i]
- local pn = rawget(pn,ni)
- if pn then
- local p = pn.markbasenode
- if p then
- local px = getfield(p,"xoffset")
- local ox = 0
- local rightkern = nil
- local pp = rawget(properties,p)
- if pp then
- pp = rawget(pp,ni)
- if pp then
- rightkern = pp.rightkern
- end
- end
- if rightkern then -- x and w ~= 0
- if pn.markdir < 0 then
- -- kern(w-x) glyph(p) kern(x) mark(n)
- ox = px - pn.markx - rightkern
- -- report_injections("r2l case 1: %p",ox)
- else
- -- kern(x) glyph(p) kern(w-x) mark(n)
- -- ox = px - getfield(p,"width") + pn.markx - pp.leftkern
- --
- -- According to Kai we don't need to handle leftkern here but I'm
- -- pretty sure I've run into a case where it was needed so maybe
- -- some day we need something more clever here.
- --
- if false then
- -- a mark with kerning
- local leftkern = pp.leftkern
- if leftkern then
- ox = px - pn.markx - leftkern
- else
- ox = px - pn.markx
- end
- else
- ox = px - pn.markx
- end
- end
- else
- -- we need to deal with fonts that have marks with width
- -- if pn.markdir < 0 then
- -- ox = px - pn.markx
- -- -- report_injections("r2l case 3: %p",ox)
- -- else
- -- -- ox = px - getfield(p,"width") + pn.markx
- ox = px - pn.markx
- -- report_injections("l2r case 3: %p",ox)
- -- end
- local wn = getfield(n,"width") -- in arial marks have widths
- if wn ~= 0 then
- -- bad: we should center
- -- insert_node_before(head,n,newkern(-wn/2))
- -- insert_node_after(head,n,newkern(-wn/2))
- pn.leftkern = -wn/2
- pn.rightkern = -wn/2
- -- wx[n] = { 0, -wn/2, 0, -wn }
- end
- end
- setfield(n,"xoffset",ox)
- --
- local py = getfield(p,"yoffset")
- -- local oy = 0
- -- if marks[p] then
- -- oy = py + pn.marky
- -- else
- -- oy = getfield(n,"yoffset") + py + pn.marky
- -- end
- local oy = getfield(n,"yoffset") + py + pn.marky
- setfield(n,"yoffset",oy)
- else
- -- normally this can't happen (only when in trace mode which is a special case anyway)
- -- report_injections("missing mark anchor %i",pn.markbase or 0)
- end
- end
- end
- end
-end
-
-local function inject_cursives(glyphs,glyphi,nofglyphs)
- local cursiveanchor, lastanchor = nil, nil
- local minc, maxc, last = 0, 0, nil
- for i=1,nofglyphs do
- local n = glyphs[i]
- local pn = rawget(properties,n)
- if pn then
- pn = rawget(pn,glyphi[i])
- end
- if pn then
- local cursivex = pn.cursivex
- if cursivex then
- if cursiveanchor then
- if cursivex ~= 0 then
- pn.leftkern = (pn.leftkern or 0) + cursivex
- end
- if lastanchor then
- if maxc == 0 then
- minc = lastanchor
- end
- maxc = lastanchor
- properties[cursiveanchor].cursivedy = pn.cursivey
- end
- last = n
- else
- maxc = 0
- end
- elseif maxc > 0 then
- local ny = getfield(n,"yoffset")
- for i=maxc,minc,-1 do
- local ti = glyphs[i]
- ny = ny + properties[ti].cursivedy
- setfield(ti,"yoffset",ny) -- why not add ?
- end
- maxc = 0
- end
- if pn.cursiveanchor then
- cursiveanchor = n
- lastanchor = i
- else
- cursiveanchor = nil
- lastanchor = nil
- if maxc > 0 then
- local ny = getfield(n,"yoffset")
- for i=maxc,minc,-1 do
- local ti = glyphs[i]
- ny = ny + properties[ti].cursivedy
- setfield(ti,"yoffset",ny) -- why not add ?
- end
- maxc = 0
- end
- end
- elseif maxc > 0 then
- local ny = getfield(n,"yoffset")
- for i=maxc,minc,-1 do
- local ti = glyphs[i]
- ny = ny + properties[ti].cursivedy
- setfield(ti,"yoffset",getfield(ti,"yoffset") + ny) -- ?
- end
- maxc = 0
- cursiveanchor = nil
- lastanchor = nil
- end
- -- if maxc > 0 and not cursiveanchor then
- -- local ny = getfield(n,"yoffset")
- -- for i=maxc,minc,-1 do
- -- local ti = glyphs[i][1]
- -- ny = ny + properties[ti].cursivedy
- -- setfield(ti,"yoffset",ny) -- why not add ?
- -- end
- -- maxc = 0
- -- end
- end
- if last and maxc > 0 then
- local ny = getfield(last,"yoffset")
- for i=maxc,minc,-1 do
- local ti = glyphs[i]
- ny = ny + properties[ti].cursivedy
- setfield(ti,"yoffset",ny) -- why not add ?
- end
- end
-end
-
--- G +D-pre G
--- D-post+
--- +D-replace+
---
--- G +D-pre +D-pre
--- D-post +D-post
--- +D-replace +D-replace
-
-local function inject_kerns(head,glist,ilist,length) -- not complete ! compare with inject_kerns_only (but unlikely disc here)
- for i=1,length do
- local n = glist[i]
- local pn = rawget(properties,n)
- if pn then
- local dp = nil
- local dr = nil
- local ni = ilist[i]
- local p = nil
- if ni == "injections" then
- p = getprev(n)
- if p then
- local id = getid(p)
- if id == disc_code then
- dp = getfield(p,"post")
- dr = getfield(p,"replace")
- end
- end
- end
- if dp then
- local i = rawget(pn,"postinjections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- local t = find_tail(dp)
- insert_node_after(dp,t,newkern(leftkern))
- setfield(p,"post",dp) -- currently we need to force a tail refresh
- end
- end
- end
- if dr then
- local i = rawget(pn,"replaceinjections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- local t = find_tail(dr)
- insert_node_after(dr,t,newkern(leftkern))
- setfield(p,"replace",dr) -- currently we need to force a tail refresh
- end
- end
- else
- local i = rawget(pn,ni)
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- insert_node_before(head,n,newkern(leftkern)) -- type 0/2
- end
- local rightkern = i.rightkern
- if rightkern and rightkern ~= 0 then
- insert_node_after(head,n,newkern(rightkern)) -- type 0/2
- end
- end
- end
- end
- end
-end
-
-local function inject_everything(head,where)
- head = tonut(head)
- if trace_injections then
- trace(head,"everything")
- end
- local glyphs, glyphi, nofglyphs, marks, marki, nofmarks = collect_glyphs(head,nofregisteredpairs > 0)
- if nofglyphs > 0 then
- if nofregisteredcursives > 0 then
- inject_cursives(glyphs,glyphi,nofglyphs)
- end
- if nofregisteredmarks > 0 then -- and nofmarks > 0
- inject_marks(marks,marki,nofmarks)
- end
- inject_kerns(head,glyphs,glyphi,nofglyphs)
- end
- if nofmarks > 0 then
- inject_kerns(head,marks,marki,nofmarks)
- end
- if keepregisteredcounts then
- keepregisteredcounts = false
- else
- nofregisteredkerns = 0
- nofregisteredpairs = 0
- nofregisteredmarks = 0
- nofregisteredcursives = 0
- end
- return tonode(head), true
-end
-
--- G +D-pre G
--- D-post+
--- +D-replace+
---
--- G +D-pre +D-pre
--- D-post +D-post
--- +D-replace +D-replace
-
-local function inject_kerns_only(head,where)
- head = tonut(head)
- if trace_injections then
- trace(head,"kerns")
- end
- local n = head
- local p = nil -- disc node when non-nil
- while n do
- local id = getid(n)
- if id == glyph_code then
- if getsubtype(n) < 256 then
- local pn = rawget(properties,n)
- if pn then
- if p then
- local d = getfield(p,"post")
- if d then
- local i = rawget(pn,"postinjections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- local t = find_tail(d)
- insert_node_after(d,t,newkern(leftkern))
- setfield(p,"post",d) -- currently we need to force a tail refresh
- end
- end
- end
- local d = getfield(p,"replace")
- if d then
- local i = rawget(pn,"replaceinjections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- local t = find_tail(d)
- insert_node_after(d,t,newkern(leftkern))
- setfield(p,"replace",d) -- currently we need to force a tail refresh
- end
- end
- else
- local i = rawget(pn,"injections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- setfield(p,"replace",newkern(leftkern))
- end
- end
- end
- else
- -- this is the most common case
- local i = rawget(pn,"injections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- head = insert_node_before(head,n,newkern(leftkern))
- end
- end
- end
- end
- end
- p = nil
- elseif id == disc_code then
- local d = getfield(n,"pre")
- if d then
- local h = d
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n) < 256 then
- local pn = rawget(properties,n)
- if pn then
- local i = rawget(pn,"preinjections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- h = insert_node_before(h,n,newkern(leftkern))
- end
- end
- end
- else
- break
- end
- end
- if h ~= d then
- setfield(n,"pre",h)
- end
- end
- local d = getfield(n,"post")
- if d then
- local h = d
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n) < 256 then
- local pn = rawget(properties,n)
- if pn then
- local i = rawget(pn,"postinjections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- h = insert_node_before(h,n,newkern(leftkern))
- end
- end
- end
- else
- break
- end
- end
- if h ~= d then
- setfield(n,"post",h)
- end
- end
- local d = getfield(n,"replace")
- if d then
- local h = d
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n) < 256 then
- local pn = rawget(properties,n)
- if pn then
- local i = rawget(pn,"replaceinjections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- h = insert_node_before(h,n,newkern(leftkern))
- end
- end
- end
- else
- break
- end
- end
- if h ~= d then
- setfield(n,"replace",h)
- end
- end
- p = n
- else
- p = nil
- end
- n = getnext(n)
- end
- --
- if keepregisteredcounts then
- keepregisteredcounts = false
- else
- nofregisteredkerns = 0
- end
- return tonode(head), true
-end
-
-local function inject_pairs_only(head,where)
- head = tonut(head)
- if trace_injections then
- trace(head,"pairs")
- end
- local n = head
- local p = nil -- disc node when non-nil
- while n do
- local id = getid(n)
- if id == glyph_code then
- if getsubtype(n) < 256 then
- local pn = rawget(properties,n)
- if pn then
- if p then
- local d = getfield(p,"post")
- if d then
- local i = rawget(pn,"postinjections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- local t = find_tail(d)
- insert_node_after(d,t,newkern(leftkern))
- setfield(p,"post",d) -- currently we need to force a tail refresh
- end
- -- local rightkern = i.rightkern
- -- if rightkern and rightkern ~= 0 then
- -- insert_node_after(head,n,newkern(rightkern))
- -- n = getnext(n) -- to be checked
- -- end
- end
- end
- local d = getfield(p,"replace")
- if d then
- local i = rawget(pn,"replaceinjections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- local t = find_tail(d)
- insert_node_after(d,t,newkern(leftkern))
- setfield(p,"replace",d) -- currently we need to force a tail refresh
- end
- -- local rightkern = i.rightkern
- -- if rightkern and rightkern ~= 0 then
- -- insert_node_after(head,n,newkern(rightkern))
- -- n = getnext(n) -- to be checked
- -- end
- end
- else
- local i = rawget(pn,"injections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- setfield(p,"replace",newkern(leftkern))
- end
- -- local rightkern = i.rightkern
- -- if rightkern and rightkern ~= 0 then
- -- insert_node_after(head,n,newkern(rightkern))
- -- n = getnext(n) -- to be checked
- -- end
- end
- end
- else
- -- this is the most common case
- local i = rawget(pn,"injections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- head = insert_node_before(head,n,newkern(leftkern))
- end
- local rightkern = i.rightkern
- if rightkern and rightkern ~= 0 then
- insert_node_after(head,n,newkern(rightkern))
- n = getnext(n) -- to be checked
- end
- local yoffset = i.yoffset
- if yoffset and yoffset ~= 0 then
- setfield(n,"yoffset",yoffset)
- end
- end
- end
- end
- end
- p = nil
- elseif id == disc_code then
- local d = getfield(n,"pre")
- if d then
- local h = d
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n) < 256 then
- local pn = rawget(properties,n)
- if pn then
- local i = rawget(pn,"preinjections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- h = insert_node_before(h,n,newkern(leftkern))
- end
- local rightkern = i.rightkern
- if rightkern and rightkern ~= 0 then
- insert_node_after(head,n,newkern(rightkern))
- n = getnext(n) -- to be checked
- end
- local yoffset = i.yoffset
- if yoffset and yoffset ~= 0 then
- setfield(n,"yoffset",yoffset)
- end
- end
- end
- else
- break
- end
- end
- if h ~= d then
- setfield(n,"pre",h)
- end
- end
- local d = getfield(n,"post")
- if d then
- local h = d
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n) < 256 then
- local pn = rawget(properties,n)
- if pn then
- local i = rawget(pn,"postinjections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- h = insert_node_before(h,n,newkern(leftkern))
- end
- local rightkern = i.rightkern
- if rightkern and rightkern ~= 0 then
- insert_node_after(head,n,newkern(rightkern))
- n = getnext(n) -- to be checked
- end
- local yoffset = i.yoffset
- if yoffset and yoffset ~= 0 then
- setfield(n,"yoffset",yoffset)
- end
- end
- end
- else
- break
- end
- end
- if h ~= d then
- setfield(n,"post",h)
- end
- end
- local d = getfield(n,"replace")
- if d then
- local h = d
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n) < 256 then
- local pn = rawget(properties,n)
- if pn then
- local i = rawget(pn,"replaceinjections")
- if i then
- local leftkern = i.leftkern
- if leftkern and leftkern ~= 0 then
- h = insert_node_before(h,n,newkern(leftkern))
- end
- local rightkern = i.rightkern
- if rightkern and rightkern ~= 0 then
- insert_node_after(head,n,newkern(rightkern))
- n = getnext(n) -- to be checked
- end
- local yoffset = i.yoffset
- if yoffset and yoffset ~= 0 then
- setfield(n,"yoffset",yoffset)
- end
- end
- end
- else
- break
- end
- end
- if h ~= d then
- setfield(n,"replace",h)
- end
- end
- p = n
- else
- p = nil
- end
- n = getnext(n)
- end
- --
- if keepregisteredcounts then
- keepregisteredcounts = false
- else
- nofregisteredpairs = 0
- nofregisteredkerns = 0
- end
- return tonode(head), true
-end
-
-function injections.handler(head,where)
- if nofregisteredmarks > 0 or nofregisteredcursives > 0 then
- return inject_everything(head,where)
- elseif nofregisteredpairs > 0 then
- return inject_pairs_only(head,where)
- elseif nofregisteredkerns > 0 then
- return inject_kerns_only(head,where)
- else
- return head, false
- end
-end
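
For reference, the injector deleted above keeps all positioning data as per-node records (leftkern, rightkern, yoffset, mark and cursive fields) in nodes.properties.data, and injections.handler then picks the cheapest pass based on the registration counters. A minimal plain-Lua sketch of the setkern bookkeeping follows; a plain table stands in for the node property store and for the node itself, so the nuts/rawget layer of the real module is not modelled:

-- sketch of the removed setkern bookkeeping (stand-ins, not the real node store)
local properties = { }              -- stands in for nodes.properties.data
local nofregisteredkerns = 0

local function setkern(current,factor,x)
    local dx = factor * x
    if dx == 0 then
        return 0, 0
    end
    nofregisteredkerns = nofregisteredkerns + 1
    local p = properties[current]
    if p then
        local i = p.injections
        if i then
            i.leftkern = dx + (i.leftkern or 0)   -- accumulate, as the real code does
        else
            p.injections = { leftkern = dx }
        end
    else
        properties[current] = { injections = { leftkern = dx } }
    end
    return dx, nofregisteredkerns
end

local glyph = { }                   -- hypothetical node handle
setkern(glyph,1,50)
setkern(glyph,1,30)
print(properties[glyph].injections.leftkern) -- 80
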
diff --git a/tex/context/base/mkiv/font-ldr.lua b/tex/context/base/mkiv/font-ldr.lua
deleted file mode 100644
index 175b4d0cc..000000000
--- a/tex/context/base/mkiv/font-ldr.lua
+++ /dev/null
@@ -1,70 +0,0 @@
-if not modules then modules = { } end modules ['font-ldr'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This module provides an experimental replacement for fontloader.to_table
--- but is not used that much.
-
-local fields = fontloader.fields
-
-if fields then
-
- local glyphfields
-
- local function get_glyphs(r)
- local t = { }
- local g = r.glyphs
- for i=1,r.glyphmax-1 do
- local gi = g[i]
- if gi then
- if not glyphfields then
- glyphfields = fields(gi)
- end
- local h = { }
- for i=1,#glyphfields do
- local s = glyphfields[i]
- h[s] = gi[s]
- end
- t[i] = h
- end
- end
- return t
- end
-
- local function to_table(r)
- local f = fields(r)
- if f then
- local t = { }
- for i=1,#f do
- local fi = f[i]
- local ri = r[fi]
- if not ri then
- -- skip
- elseif fi == "glyphs" then
- t.glyphs = get_glyphs(r)
- elseif fi == "subfonts" then
- t[fi] = ri
- ri.glyphs = get_glyphs(ri)
- else
- t[fi] = r[fi]
- end
- end
- return t
- end
- end
-
- -- currently glyphs, subfont-glyphs and the main table are userdata
-
- function fonts.to_table(raw)
- return to_table(raw)
- end
-
-else
-
- fonts.to_table = fontloader.to_table
-
-end
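
The removed font-ldr.lua provided fonts.to_table as an experimental stand-in for fontloader.to_table: it asked fontloader.fields which fields a raw font object exposes and copied them (with special handling for glyphs and subfonts) into a plain table, falling back to fontloader.to_table when fields is unavailable. A rough sketch of that copy pattern in plain Lua; fontloader and its userdata objects only exist inside LuaTeX, so a fake fields function and a plain record stand in for them here:

-- sketch: copy the reported fields of an object into a plain table
local function fields(obj)              -- stand-in for fontloader.fields
    local t = { }
    for k in pairs(obj) do
        t[#t+1] = k
    end
    return t
end

local function to_table(r)
    local f = fields(r)
    if not f then
        return nil
    end
    local t = { }
    for i=1,#f do
        local fi = f[i]
        t[fi] = r[fi]                   -- glyphs/subfonts get deeper copies in the real code
    end
    return t
end

local raw = { fontname = "Test", ascent = 800, descent = 200 } -- fake raw font
local tab = to_table(raw)
print(tab.fontname, tab.ascent, tab.descent)
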
diff --git a/tex/context/base/mkiv/font-lib.mkvi b/tex/context/base/mkiv/font-lib.mkvi
index d9cae503b..7ae94c99b 100644
--- a/tex/context/base/mkiv/font-lib.mkvi
+++ b/tex/context/base/mkiv/font-lib.mkvi
@@ -34,8 +34,6 @@
\registerctxluafile{font-dsp}{1.001} % ... for this one
\registerctxluafile{font-off}{1.001} % the old loader
-% \registerctxluafile{font-tfm}{1.001}
-
\registerctxluafile{font-hsh}{1.001} % hashes used by context
\registerctxluafile{font-nod}{1.001}
@@ -58,7 +56,6 @@
\registerctxluafile{font-onr}{1.001}
\registerctxluafile{font-one}{1.001}
-%registerctxluafile{font-afm}{1.001}
\registerctxluafile{font-afk}{1.001}
% tfm
diff --git a/tex/context/base/mkiv/font-mat.mkvi b/tex/context/base/mkiv/font-mat.mkvi
index cbc78aca8..6ce782ee3 100644
--- a/tex/context/base/mkiv/font-mat.mkvi
+++ b/tex/context/base/mkiv/font-mat.mkvi
@@ -24,18 +24,16 @@
%D \macros
%D {textonly}
%D
-%D Traditionally math has a big impact on font definitions, mainly
-%D because we need to define alphabet variants using families and
-%D fonts. This means that one can easily get 10 fonts loaded per
-%D math size. In \MKIV\ we use a different approach: one family
-%D which has either a virtual font made of traditional fonts, or
-%D an \OPENTYPE\ font that has it all.
+%D Traditionally math has a big impact on font definitions, mainly because we need
+%D to define alphabet variants using families and fonts. This means that one can
+%D easily get 10 fonts loaded per math size. In \MKIV\ we use a different approach:
+%D one family which has either a virtual font made of traditional fonts, or an
+%D \OPENTYPE\ font that has it all.
%D
-%D We currently use only one math family but in the future we
-%D might consider using a second one for bold math. For the
-%D moment we keep the \MKII\ method of using a token register
-%D for definitions but we already dropped the text and symbols
-%D ones since they now live in the same family.
+%D We currently use only one math family but in the future we might consider using a
+%D second one for bold math. For the moment we keep the \MKII\ method of using a
+%D token register for definitions but we already dropped the text and symbols ones
+%D since they now live in the same family.
\newtoks \t_font_math_strategies
\newconditional\c_font_synchronize_math_fonts \settrue\c_font_synchronize_math_fonts
@@ -45,14 +43,12 @@
\unexpanded\def\textonly{\setfalse\c_font_synchronize_math_fonts} % document this
-%D The main math font definer. We have removed some optimized
-%D code simply because we now always have a fontclass. We could
-%D check for fontclass being default or empty and save a few
-%D tests but it does not help us when no math is defined.
-
-%D Because we want to keep mr=.. and mb=... settings (read: not
-%D break downward compatibility by enforcing mrlr etc) we need a
-%D bit more code that optimal.
+%D The main math font definer. We have removed some optimized code simply because we
+%D now always have a fontclass. We could check for fontclass being default or empty
+%D and save a few tests but it does not help us when no math is defined.
+%D
+%D Because we want to keep mr=... and mb=... settings (read: not break downward
+%D compatibility by enforcing mrlr etc) we need a bit more code than optimal.
% todo: \c_font_fam_mr
@@ -75,33 +71,9 @@
\def\mathsizesuffix{\ifcase\fontface\or\mathtextsuffix\or\mathscriptsuffix\or\mathscriptscriptsuffix\fi}
-% Beware: truefontname also does a fallback on defaultfontclass so there
-% can be some interference here, which is why we use a different method
-% for bold.
-
-% \def\font_helpers_set_math_family_a
-% {\ifcsname\??fontinstanceready\fontclass -\fontbody-\s!mm-\fontfamily-\fontsize\endcsname \setfalse\c_font_auto_size
-% \csname\??fontinstanceready\fontclass -\fontbody-\s!mm-\fontfamily-\fontsize\endcsname \else
-% \ifcsname\??fontinstanceready\fontclass -\fontbody-\s!mm-\fontfamily \endcsname \settrue \c_font_auto_size
-% \csname\??fontinstanceready\fontclass -\fontbody-\s!mm-\fontfamily \endcsname \else
-% \font_helpers_set_math_family_b
-% \fi\fi}
-
-% \def\font_helpers_set_math_family_b
-% {\ifcsname\??fontinstanceready\defaultfontclass-\fontbody-\s!mm-\fontfamily-\fontsize\endcsname \setfalse\c_font_auto_size
-% \csname\??fontinstanceready\defaultfontclass-\fontbody-\s!mm-\fontfamily-\fontsize\endcsname \else
-% \ifcsname\??fontinstanceready\defaultfontclass-\fontbody-\s!mm-\fontfamily \endcsname \settrue \c_font_auto_size
-% \csname\??fontinstanceready\defaultfontclass-\fontbody-\s!mm-\fontfamily \endcsname \else
-% \font_helpers_set_math_family_c
-% \fi\fi}
-
-% \def\font_helpers_set_math_family_c
-% {\ifcsname\??fontinstanceready \fontbody-\s!mm-\fontfamily-\fontsize\endcsname \setfalse\c_font_auto_size
-% \csname\??fontinstanceready \fontbody-\s!mm-\fontfamily-\fontsize\endcsname \else
-% \ifcsname\??fontinstanceready \fontbody-\s!mm-\fontfamily \endcsname \settrue \c_font_auto_size
-% \csname\??fontinstanceready \fontbody-\s!mm-\fontfamily \endcsname \else
-% \settrue \c_font_auto_size
-% \fi\fi}
+%D Beware: truefontname also does a fallback on defaultfontclass so there
+%D can be some interference here, which is why we use a different method
+%D for bold.
\def\font_helpers_set_math_family_a
{\ifcsname\??fontinstanceready\fontclass -\fontbody-\s!mm-\fontfamily-\fontsize\endcsname \setfalse\c_font_auto_size
@@ -133,21 +105,20 @@
{\let\savedfontbody\fontbody
\let\fontfamily#family%
% the order is important as we depend on known id's when completing fonts
- \let\mathsizesuffix\mathscriptscriptsuffix\let\fontface\!!plusthree\font_helpers_set_math_family_a\scriptscriptfont#mrtag\font
- \let\mathsizesuffix\mathscriptsuffix \let\fontface\!!plustwo \font_helpers_set_math_family_a\scriptfont #mrtag\font
- \let\mathsizesuffix\mathtextsuffix \let\fontface\!!plusone \font_helpers_set_math_family_a\textfont #mrtag\font
+   % enabling is needed when we have fallbacks, which spoil the families
+ \let\mathsizesuffix\mathscriptscriptsuffix\let\fontface\!!plusthree
+ \font_helpers_set_math_family_a\scriptscriptfont#mrtag\font % defines
+ \font_helpers_set_math_family_a\scriptscriptfont#mrtag\font % enables
+ \let\mathsizesuffix\mathscriptsuffix \let\fontface\!!plustwo
+ \font_helpers_set_math_family_a\scriptfont #mrtag\font % defines
+ \font_helpers_set_math_family_a\scriptfont #mrtag\font % enables
+ \let\mathsizesuffix\mathtextsuffix \let\fontface\!!plusone
+ \font_helpers_set_math_family_a\textfont #mrtag\font % defines
+ \font_helpers_set_math_family_a\textfont #mrtag\font % enables
\let\mathsizesuffix\empty \let\fontface\!!zerocount
\let\fontbody\savedfontbody
\setfalse\c_font_auto_size}
-% \def\font_helpers_set_math_family_bold_a#font#mbfam#mrfam%
-% {\ifcsname\??fontinstanceready\fontclass-\fontbody-\s!mm-\fontfamily-\fontsize\endcsname \setfalse\c_font_auto_size
-% \csname\??fontinstanceready\fontclass-\fontbody-\s!mm-\fontfamily-\fontsize\endcsname #font#mbfam\font \else
-% \ifcsname\??fontinstanceready\fontclass-\fontbody-\s!mm-\fontfamily \endcsname \settrue \c_font_auto_size
-% \csname\??fontinstanceready\fontclass-\fontbody-\s!mm-\fontfamily \endcsname #font#mbfam\font \else
-% #font#mbfam#font#mrfam%
-% \fi\fi}
-
\def\font_helpers_set_math_family_bold_a#font#mbfam#mrfam%
{\ifcsname\??fontinstanceready\fontclass-\fontbody-\s!mm-\fontfamily-\fontsize\endcsname \setfalse\c_font_auto_size
\lastnamedcs #font#mbfam\font \else
@@ -161,9 +132,15 @@
\let\defaultfontclass\fontclass % else truefontname falls back on the wrong one
\let\savedfontbody\fontbody
\let\fontfamily#familytag%
- \let\mathsizesuffix\mathscriptscriptsuffix\let\fontface\!!plusthree\font_helpers_set_math_family_bold_a\scriptscriptfont#mbfam#mrfam%
- \let\mathsizesuffix\mathscriptsuffix \let\fontface\!!plustwo \font_helpers_set_math_family_bold_a\scriptfont #mbfam#mrfam%
- \let\mathsizesuffix\mathtextsuffix \let\fontface\!!plusone \font_helpers_set_math_family_bold_a\textfont #mbfam#mrfam%
+ \let\mathsizesuffix\mathscriptscriptsuffix\let\fontface\!!plusthree
+ \font_helpers_set_math_family_bold_a\scriptscriptfont#mbfam#mrfam% defines
+ \font_helpers_set_math_family_bold_a\scriptscriptfont#mbfam#mrfam% enables
+ \let\mathsizesuffix\mathscriptsuffix \let\fontface\!!plustwo
+ \font_helpers_set_math_family_bold_a\scriptfont #mbfam#mrfam% defines
+ \font_helpers_set_math_family_bold_a\scriptfont #mbfam#mrfam% enables
+ \let\mathsizesuffix\mathtextsuffix \let\fontface\!!plusone
+ \font_helpers_set_math_family_bold_a\textfont #mbfam#mrfam% defines
+ \font_helpers_set_math_family_bold_a\textfont #mbfam#mrfam% enables
\let\mathsizesuffix\empty \let\fontface\!!zerocount
\let\fontbody\savedfontbody
\let\defaultfontclass\savedfontclass
@@ -203,9 +180,8 @@
\font_helpers_set_math_family_bold_indeed#mbfam#familytag#mrfam%
\fi}
-%D It can happen that we use a bodyfont with no math in which case
-%D we have a problem with setting the global bodyfont size in the
-%D page builder. For instance in:
+%D It can happen that we use a bodyfont with no math in which case we have a problem
+%D with setting the global bodyfont size in the page builder. For instance in:
%D
%D \starttext
%D \definetypeface[test][rm][serif][pagella][default]
@@ -213,9 +189,9 @@
%D test
%D \stoptext
%D
-%D This is why we need the check. At the cost of some extra checking we gain a little
-%D in restoring global states and, what's more important, we get rid of large math parameter
-%D push/pop in tracingall when not needed.
+%D This is why we need the check. At the cost of some extra checking we gain a
+%D little in restoring global states and, what's more important, we get rid of large
+%D math parameter push/pop in tracingall when not needed.
\def\font_helpers_preset_math_family_indeed#fam#familytag%
{\expandafter\let\expandafter\v_font_math_one\csname\??fontinstanceclass\fontclass-\fontbody-\s!mm-#familytag-\fontsize-1\endcsname
@@ -235,13 +211,14 @@
\let\font_helpers_reset_fontclass_math_families\gobbleoneargument
-% It would be nice if characters could be defined in a neutral way (say fam 255) and
-% be mapped to a real family during noad list construction. However, this changes
-% tex in critical places so for the moment we simulate this using manipulation.
-
-% For tracing purposes we use three families but in l2r mode 1 and 2 are copies of 0
-% while in rl mode 0 is a copy of 1. There is no real overhead involved in this. This
-% also permits different font definitions for normal and mixed.
+%D It would be nice if characters could be defined in a neutral way (say fam 255)
+%D and be mapped to a real family during noad list construction. However, this
+%D changes tex in critical places so for the moment we simulate this using
+%D manipulation.
+%D
+%D For tracing purposes we use three families but in l2r mode 1 and 2 are copies of
+%D 0 while in rl mode 0 is a copy of 1. There is no real overhead involved in this.
+%D This also permits different font definitions for normal and mixed.
\let\m_font_class_direction\empty
\let\m_font_class_features \empty
@@ -439,10 +416,10 @@
\font_helpers_synchronize_math_bold_strategy
\to \everymathematics
-% Bold is somewhat special as we might want both full-bold-math mixed
-% regular-math, as well as automatic adaption to outer bold (in titles
-% and inline text bold) so we will need explicit switches as well as
-% an automatic one. (We will use lucida as an example.)
+%D Bold is somewhat special as we might want both full-bold-math mixed regular-math,
+%D as well as automatic adaption to outer bold (in titles and inline text bold) so
+%D we will need explicit switches as well as an automatic one. (We will use lucida
+%D as an example.)
\ifdefined\mathdefault \else \let\mathdefault\relax \fi
diff --git a/tex/context/base/mkiv/font-ocl.lua b/tex/context/base/mkiv/font-ocl.lua
index 2ecf1ba42..b3f368836 100644
--- a/tex/context/base/mkiv/font-ocl.lua
+++ b/tex/context/base/mkiv/font-ocl.lua
@@ -1,6 +1,6 @@
if not modules then modules = { } end modules ['font-ocl'] = {
version = 1.001,
- comment = "companion to font-otf.lua (context)",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
diff --git a/tex/context/base/mkiv/font-odk.lua b/tex/context/base/mkiv/font-odk.lua
deleted file mode 100644
index c34efc120..000000000
--- a/tex/context/base/mkiv/font-odk.lua
+++ /dev/null
@@ -1,904 +0,0 @@
--- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
--- We keep the original around for a while so that we can check it --
--- when the above code does it wrong (data tables are not included). --
--- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
-
--- author : Kai Eigner, TAT Zetwerk
--- copyright : TAT Zetwerk
--- comment : see font-odv.lua for current implementation
-
--- local state = attributes.private('state')
--- local sylnr = attributes.private('syllabe')
---
--- local function install_dev(tfmdata)
--- local features = tfmdata.resources.features
--- local sequences = tfmdata.resources.sequences
---
--- local insertpos = 1
--- for s=1,#sequences do -- classify chars
--- for k in pairs(basic_shaping_forms) do
--- if sequences[s].features and ( sequences[s].features[k] or sequences[s].features.locl ) then insertpos = s + 1 end
--- end
--- end
---
--- features.gsub["dev2_reorder_matras"] = { ["dev2"] = { ["dflt"] = true } }
--- features.gsub["dev2_reorder_reph"] = { ["dev2"] = { ["dflt"] = true } }
--- features.gsub["dev2_reorder_pre_base_reordering_consonants"] = { ["dev2"] = { ["dflt"] = true } }
--- features.gsub["remove_joiners"] = { ["deva"] = { ["dflt"] = true }, ["dev2"] = { ["dflt"] = true } }
---
--- local sequence_dev2_reorder_matras = {
--- chain = 0,
--- features = { dev2_reorder_matras = { dev2 = { dflt = true } } },
--- flags = { false, false, false, false },
--- name = "dev2_reorder_matras",
--- subtables = { "dev2_reorder_matras" },
--- type = "dev2_reorder_matras",
--- }
--- local sequence_dev2_reorder_reph = {
--- chain = 0,
--- features = { dev2_reorder_reph = { dev2 = { dflt = true } } },
--- flags = { false, false, false, false },
--- name = "dev2_reorder_reph",
--- subtables = { "dev2_reorder_reph" },
--- type = "dev2_reorder_reph",
--- }
--- local sequence_dev2_reorder_pre_base_reordering_consonants = {
--- chain = 0,
--- features = { dev2_reorder_pre_base_reordering_consonants = { dev2 = { dflt = true } } },
--- flags = { false, false, false, false },
--- name = "dev2_reorder_pre_base_reordering_consonants",
--- subtables = { "dev2_reorder_pre_base_reordering_consonants" },
--- type = "dev2_reorder_pre_base_reordering_consonants",
--- }
--- local sequence_remove_joiners = {
--- chain = 0,
--- features = { remove_joiners = { deva = { dflt = true }, dev2 = { dflt = true } } },
--- flags = { false, false, false, false },
--- name = "remove_joiners",
--- subtables = { "remove_joiners" },
--- type = "remove_joiners",
--- }
--- table.insert(sequences, insertpos, sequence_dev2_reorder_pre_base_reordering_consonants)
--- table.insert(sequences, insertpos, sequence_dev2_reorder_reph)
--- table.insert(sequences, insertpos, sequence_dev2_reorder_matras)
--- table.insert(sequences, insertpos, sequence_remove_joiners)
--- end
---
--- local function deva_reorder(head,start,stop,font,attr)
--- local tfmdata = fontdata[font]
--- local lookuphash = tfmdata.resources.lookuphash
--- local sequences = tfmdata.resources.sequences
---
--- if not lookuphash["remove_joiners"] then install_dev(tfmdata) end --install Devanagari-features
---
--- local sharedfeatures = tfmdata.shared.features
--- sharedfeatures["remove_joiners"] = true
--- local datasets = otf.dataset(tfmdata,font,attr)
---
--- lookuphash["remove_joiners"] = { [0x200C] = true, [0x200D] = true }
---
--- local current, n, base, firstcons, lastcons, basefound = start, start.next, nil, nil, nil, false
--- local reph, vattu = false, false
--- for s=1,#sequences do
--- local dataset = datasets[s]
--- featurevalue = dataset and dataset[1]
--- if featurevalue and dataset[4] == "rphf" then reph = true end
--- if featurevalue and dataset[4] == "blwf" then vattu = true end
--- end
--- if ra[start.char] and halant[n.char] and reph then -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
--- if n == stop then return head, stop end
--- if zwj[n.next.char] then
--- current = start
--- else
--- current = n.next
--- set_attribute(start,state,5) -- rphf
--- end
--- end
---
--- if nbsp[current.char] then --Stand Alone cluster
--- if current == stop then
--- stop = stop.prev
--- head = node.remove(head, current)
--- node.free(current)
--- return head, stop
--- else
--- base, firstcons, lastcons = current, current, current
--- current = current.next
--- if current ~= stop then
--- if nukta[current.char] then current = current.next end
--- if zwj[current.char] then
--- if current ~= stop and current.next ~= stop and halant[current.next.char] then
--- current = current.next
--- local tmp = current.next.next
--- local changestop = current.next == stop
--- local tempcurrent = node.copy(current.next)
--- tempcurrent.next = node.copy(current)
--- tempcurrent.next.prev = tempcurrent
--- set_attribute(tempcurrent,state,8) --blwf
--- tempcurrent = nodes.handlers.characters(tempcurrent)
--- unset_attribute(tempcurrent,state)
--- if current.next.char == tempcurrent.char then
--- node.flush_list(tempcurrent)
--- local n = node.copy(current)
--- current.char = dotted_circle
--- head = node.insert_after(head, current, n)
--- else
--- current.char = tempcurrent.char -- (assumes that result of blwf consists of one node)
--- local freenode = current.next
--- current.next = tmp
--- tmp.prev = current
--- node.free(freenode)
--- node.flush_list(tempcurrent)
--- if changestop then stop = current end
--- end
--- end
--- end
--- end
--- end
--- end
---
--- while not basefound do -- find base consonant
--- if consonant[current.char] then
--- set_attribute(current, state, 6) -- half
--- if not firstcons then firstcons = current end
--- lastcons = current
--- if not base then
--- base = current
--- else --check whether consonant has below-base (or post-base) form
--- local baseform = true
--- for s=1,#sequences do
--- local sequence = sequences[s]
--- local dataset = datasets[s]
--- featurevalue = dataset and dataset[1]
--- if featurevalue and dataset[4] == "blwf" then
--- local subtables = sequence.subtables
--- for i=1,#subtables do
--- local lookupname = subtables[i]
--- local lookupcache = lookuphash[lookupname]
--- if lookupcache then
--- local lookupmatch = lookupcache[current.char]
--- if lookupmatch then
--- set_attribute(current, state, 8) -- blwf
--- baseform = false
--- end
--- end
--- end
--- end
--- end
--- if baseform then base = current end
--- end
--- end
--- basefound = current == stop
--- current = current.next
--- end
--- if base ~= lastcons then -- if base consonant is not last one then move halant from base consonant to last one
--- n = base.next
--- if nukta[n.char] then n = n.next end
--- if halant[n.char] then
--- if lastcons ~= stop then
--- local ln = lastcons.next
--- if nukta[ln.char] then lastcons = ln end
--- end
--- local np, nn, ln = n.prev, n.next, lastcons.next
--- np.next = n.next
--- nn.prev = n.prev
--- lastcons.next = n
--- if ln then ln.prev = n end
--- n.next = ln
--- n.prev = lastcons
--- if lastcons == stop then stop = n end
--- end
--- end
---
--- n = start.next
--- if ra[start.char] and halant[n.char] and not ( n ~= stop and ( zwj[n.next.char] or zwnj[n.next.char] ) ) then -- if syllable starts with Ra + H then move this combination so that it follows either: the post-base 'matra' (if any) or the base consonant
--- local matra = base
--- if base ~= stop and dependent_vowel[base.next.char] then matra = base.next end
--- local sp, nn, mn = start.prev, n.next, matra.next
--- if sp then sp.next = nn end
--- nn.prev = sp
--- matra.next = start
--- start.prev = matra
--- n.next = mn
--- if mn then mn.prev = n end
--- if head == start then head = nn end
--- start = nn
--- if matra == stop then stop = n end
--- end
---
--- local current = start
--- while current ~= stop do
--- if halant[current.next.char] and current.next ~= stop and zwnj[current.next.next.char] then unset_attribute(current, state) end
--- current = current.next
--- end
---
--- if has_attribute(base, state) and base ~= stop and halant[base.next.char] and not ( base.next ~= stop and zwj[base.next.next.char] ) then unset_attribute(base, state) end
---
--- local current, allreordered, moved = start, false, { [base] = true }
--- local a, b, p, bn = base, base, base, base.next
--- if base ~= stop and nukta[bn.char] then a, b, p = bn, bn, bn end
--- while not allreordered do
--- local c, n, l = current, current.next, nil --current is always consonant
--- if c ~= stop and nukta[n.char] then c = n n = n.next end
--- if c ~= stop and halant[n.char] then c = n n = n.next end
--- while c ~= stop and dependent_vowel[n.char] do c = n n = n.next end
--- if c ~= stop and vowel_modifier[n.char] then c = n n = n.next end
--- if c ~= stop and stress_tone_mark[n.char] then c = n n = n.next end
--- local bp, cn = firstcons.prev, current.next
--- while cn ~= c.next do -- move pre-base matras...
--- if pre_mark[cn.char] then
--- if bp then bp.next = cn end
--- cn.prev.next = cn.next
--- if cn.next then cn.next.prev = cn.prev end
--- if cn == stop then stop = cn.prev end
--- cn.prev = bp
--- cn.next = firstcons
--- firstcons.prev = cn
--- if firstcons == start then
--- if head == start then head = cn end
--- start = cn
--- end
--- break
--- end
--- cn = cn.next
--- end
--- allreordered = c == stop
--- current = c.next
--- end
---
--- if reph or vattu then
--- local current, cns = start, nil
--- while current ~= stop do
--- local c, n = current, current.next
--- if ra[current.char] and halant[n.char] then
--- c, n = n, n.next
--- local b, bn = base, base
--- while bn ~= stop do
--- if dependent_vowel[bn.next.char] then b = bn.next end
--- bn = bn.next
--- end
--- if has_attribute(current,state,attribute) == 5 then -- position Reph (Ra + H) after post-base 'matra' (if any) since these become marks on the 'matra', not on the base glyph
--- if b ~= current then
--- if current == start then
--- if head == start then head = n end
--- start = n
--- end
--- if b == stop then stop = c end
--- if current.prev then current.prev.next = n end
--- if n then n.prev = current.prev end
--- c.next = b.next
--- if b.next then b.next.prev = c end
--- b.next = current
--- current.prev = b
--- end
--- elseif cns and cns.next ~= current then -- position below-base Ra (vattu) following the consonants on which it is placed (either the base consonant or one of the pre-base consonants)
--- local cp, cnsn = current.prev, cns.next
--- if cp then cp.next = n end
--- if n then n.prev = cp end
--- cns.next = current
--- current.prev = cns
--- c.next = cnsn
--- if cnsn then cnsn.prev = c end
--- if c == stop then stop = cp break end
--- current = n.prev
--- end
--- elseif consonant[current.char] or nbsp[current.char] then
--- cns = current
--- if halant[cns.next.char] then cns = cns.next end
--- end
--- current = current.next
--- end
--- end
---
--- if nbsp[base.char] then
--- head = node.remove(head, base)
--- node.free(base)
--- end
---
--- return head, stop
--- end
---
--- function dev2_reorder_matras(start,kind,lookupname,replacement)
--- local current = start
--- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do
--- if halant[current.char] and not has_attribute(current, state) then
--- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
--- local sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- if current.next then current.next.prev = start end
--- start.next = current.next
--- current.next = start
--- start.prev = current
--- start = sn
--- break
--- end
--- current = current.next
--- end
--- return start, true
--- end
---
--- function dev2_reorder_reph(start,kind,lookupname,replacement)
--- local current, sn = start.next, nil
--- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 2
--- if halant[current.char] and not has_attribute(current, state) then
--- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
--- sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- if current.next then current.next.prev = start end
--- start.next = current.next
--- current.next = start
--- start.prev = current
--- start = sn
--- break
--- end
--- current = current.next
--- end
--- if not sn then
--- current = start.next
--- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 4
--- if has_attribute(current, state) == 9 then --post-base
--- sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- start.prev = current.prev
--- current.prev.next = start
--- start.next = current
--- current.prev = start
--- start = sn
--- break
--- end
--- current = current.next
--- end
--- end
--- if not sn then
--- current = start.next
--- local c = nil
--- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 5
--- if not c and ( above_mark[current.char] or below_mark[current.char] or post_mark[current.char] ) and ReorderClass[current.char] ~= "after subscript" then c = current end
--- current = current.next
--- end
--- if c then
--- sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- start.prev = c.prev
--- c.prev.next = start
--- start.next = c
--- c.prev = start
--- start = sn
--- end
--- end
--- if not sn then
--- current = start
--- while current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) do --step 6
--- current = current.next
--- end
--- if start ~= current then
--- sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- if current.next then current.next.prev = start end
--- start.next = current.next
--- current.next = start
--- start.prev = current
--- start = sn
--- end
--- end
--- return start, true
--- end
---
--- function dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement)
--- local current, sn = start, nil
--- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do
--- if halant[current.char] and not has_attribute(current, state) then
--- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
--- sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- if current.next then current.next.prev = start end
--- start.next = current.next
--- current.next = start
--- start.prev = current
--- start = sn
--- break
--- end
--- current = current.next
--- end
--- if not sn then
--- current = start.next
--- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do
--- if not consonant[current.char] and has_attribute(current, state) then --main
--- sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- start.prev = current.prev
--- current.prev.next = start
--- start.next = current
--- current.prev = start
--- start = sn
--- break
--- end
--- current = current.next
--- end
--- end
--- return start, true
--- end
---
--- function remove_joiners(start,kind,lookupname,replacement)
--- local stop = start.next
--- while stop and stop.id == glyph and stop.subtype<256 and stop.font == start.font and (zwj[stop.char] or zwnj[stop.char]) do stop = stop.next end
--- if stop then stop.prev.next = nil stop.prev = start.prev end
--- if start.prev then start.prev.next = stop end
--- node.flush_list(start)
--- return stop, true
--- end
---
--- local function dev2_reorder(head,start,stop,font,attr)
--- local tfmdata = fontdata[font]
--- local lookuphash = tfmdata.resources.lookuphash
--- local sequences = tfmdata.resources.sequences
---
--- if not lookuphash["remove_joiners"] then install_dev(tfmdata) end --install Devanagari-features
---
--- local sharedfeatures = tfmdata.shared.features
--- sharedfeatures["dev2_reorder_matras"] = true
--- sharedfeatures["dev2_reorder_reph"] = true
--- sharedfeatures["dev2_reorder_pre_base_reordering_consonants"] = true
--- sharedfeatures["remove_joiners"] = true
--- local datasets = otf.dataset(tfmdata,font,attr)
---
--- local reph, pre_base_reordering_consonants = false, nil
--- local halfpos, basepos, subpos, postpos = nil, nil, nil, nil
--- local locl = { }
---
--- for s=1,#sequences do -- classify chars
--- local sequence = sequences[s]
--- local dataset = datasets[s]
--- featurevalue = dataset and dataset[1]
--- if featurevalue and dataset[4] then
--- local subtables = sequence.subtables
--- for i=1,#subtables do
--- local lookupname = subtables[i]
--- local lookupcache = lookuphash[lookupname]
--- if lookupcache then
--- if dataset[4] == "rphf" then
--- if dataset[3] ~= 0 then --rphf is result of chain
--- else
--- reph = lookupcache[0x0930] and lookupcache[0x0930][0x094D] and lookupcache[0x0930][0x094D]["ligature"]
--- end
--- end
--- if dataset[4] == "pref" and not pre_base_reordering_consonants then
--- for k, v in pairs(lookupcache[0x094D]) do
--- pre_base_reordering_consonants[k] = v and v["ligature"] --ToDo: reph might also be result of chain
--- end
--- end
--- local current = start
--- while current ~= stop.next do
--- if dataset[4] == "locl" then locl[current] = lookupcache[current.char] end --ToDo: locl might also be result of chain
--- if current ~= stop then
--- local c, n = locl[current] or current.char, locl[current.next] or current.next.char
--- if dataset[4] == "rphf" and lookupcache[c] and lookupcache[c][n] then --above-base: rphf Consonant + Halant
--- if current.next ~= stop and ( zwj[current.next.next.char] or zwnj[current.next.next.char] ) then --ZWJ and ZWNJ prevent creation of reph
--- current = current.next
--- elseif current == start then
--- set_attribute(current,state,5)
--- end
--- current = current.next
--- end
--- if dataset[4] == "half" and lookupcache[c] and lookupcache[c][n] then --half forms: half Consonant + Halant
--- if current.next ~= stop and zwnj[current.next.next.char] then --ZWNJ prevent creation of half
--- current = current.next
--- else
--- set_attribute(current,state,6)
--- if not halfpos then halfpos = current end
--- end
--- current = current.next
--- end
--- if dataset[4] == "pref" and lookupcache[c] and lookupcache[c][n] then --pre-base: pref Halant + Consonant
--- set_attribute(current,state,7)
--- set_attribute(current.next,state,7)
--- current = current.next
--- end
--- if dataset[4] == "blwf" and lookupcache[c] and lookupcache[c][n] then --below-base: blwf Halant + Consonant
--- set_attribute(current,state,8)
--- set_attribute(current.next,state,8)
--- current = current.next
--- subpos = current
--- end
--- if dataset[4] == "pstf" and lookupcache[c] and lookupcache[c][n] then --post-base: pstf Halant + Consonant
--- set_attribute(current,state,9)
--- set_attribute(current.next,state,9)
--- current = current.next
--- postpos = current
--- end
--- end
--- current = current.next
--- end
--- end
--- end
--- end
--- end
---
--- lookuphash["dev2_reorder_matras"] = pre_mark
--- lookuphash["dev2_reorder_reph"] = { [reph] = true }
--- lookuphash["dev2_reorder_pre_base_reordering_consonants"] = pre_base_reordering_consonants or { }
--- lookuphash["remove_joiners"] = { [0x200C] = true, [0x200D] = true }
---
--- local current, base, firstcons = start, nil, nil
--- if has_attribute(start,state) == 5 then current = start.next.next end -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
---
--- if current ~= stop.next and nbsp[current.char] then --Stand Alone cluster
--- if current == stop then
--- stop = stop.prev
--- head = node.remove(head, current)
--- node.free(current)
--- return head, stop
--- else
--- base = current
--- current = current.next
--- if current ~= stop then
--- if nukta[current.char] then current = current.next end
--- if zwj[current.char] then
--- if current ~= stop and current.next ~= stop and halant[current.next.char] then
--- current = current.next
--- local tmp = current.next.next
--- local changestop = current.next == stop
--- current.next.next = nil
--- set_attribute(current,state,7) --pref
--- current = nodes.handlers.characters(current)
--- set_attribute(current,state,8) --blwf
--- current = nodes.handlers.characters(current)
--- set_attribute(current,state,9) --pstf
--- current = nodes.handlers.characters(current)
--- unset_attribute(current,state)
--- if halant[current.char] then
--- current.next.next = tmp
--- local nc = node.copy(current)
--- current.char = dotted_circle
--- head = node.insert_after(head, current, nc)
--- else
--- current.next = tmp -- (assumes that result of pref, blwf, or pstf consists of one node)
--- if changestop then stop = current end
--- end
--- end
--- end
--- end
--- end
--- else --not Stand Alone cluster
--- while current ~= stop.next do -- find base consonant
--- if consonant[current.char] and not ( current ~= stop and halant[current.next.char] and current.next ~= stop and zwj[current.next.next.char] ) then
--- if not firstcons then firstcons = current end
--- if not ( has_attribute(current, state) == 7 or has_attribute(current, state) == 8 or has_attribute(current, state) == 9 ) then base = current end --check whether consonant has below-base or post-base form or is pre-base reordering Ra
--- end
--- current = current.next
--- end
--- if not base then
--- base = firstcons
--- end
--- end
---
--- if not base then
--- if has_attribute(start, state) == 5 then unset_attribute(start, state) end
--- return head, stop
--- else
--- if has_attribute(base, state) then unset_attribute(base, state) end
--- basepos = base
--- end
--- if not halfpos then halfpos = base end
--- if not subpos then subpos = base end
--- if not postpos then postpos = subpos or base end
---
--- --Matra characters are classified and reordered by which consonant in a conjunct they have affinity for
--- local moved = { }
--- current = start
--- while current ~= stop.next do
--- local char, target, cn = locl[current] or current.char, nil, current.next
--- if not moved[current] and dependent_vowel[char] then
--- if pre_mark[char] then -- Before first half form in the syllable
--- moved[current] = true
--- if current.prev then current.prev.next = current.next end
--- if current.next then current.next.prev = current.prev end
--- if current == stop then stop = current.prev end
--- if halfpos == start then
--- if head == start then head = current end
--- start = current
--- end
--- if halfpos.prev then halfpos.prev.next = current end
--- current.prev = halfpos.prev
--- halfpos.prev = current
--- current.next = halfpos
--- halfpos = current
--- elseif above_mark[char] then -- After main consonant
--- target = basepos
--- if subpos == basepos then subpos = current end
--- if postpos == basepos then postpos = current end
--- basepos = current
--- elseif below_mark[char] then -- After subjoined consonants
--- target = subpos
--- if postpos == subpos then postpos = current end
--- subpos = current
--- elseif post_mark[char] then -- After post-form consonant
--- target = postpos
--- postpos = current
--- end
--- if ( above_mark[char] or below_mark[char] or post_mark[char] ) and current.prev ~= target then
--- if current.prev then current.prev.next = current.next end
--- if current.next then current.next.prev = current.prev end
--- if current == stop then stop = current.prev end
--- if target.next then target.next.prev = current end
--- current.next = target.next
--- target.next = current
--- current.prev = target
--- end
--- end
--- current = cn
--- end
---
--- --Reorder marks to canonical order: Adjacent nukta and halant or nukta and vedic sign are always repositioned if necessary, so that the nukta is first.
--- local current, c = start, nil
--- while current ~= stop do
--- if halant[current.char] or stress_tone_mark[current.char] then
--- if not c then c = current end
--- else
--- c = nil
--- end
--- if c and nukta[current.next.char] then
--- if head == c then head = current.next end
--- if stop == current.next then stop = current end
--- if c.prev then c.prev.next = current.next end
--- current.next.prev = c.prev
--- current.next = current.next.next
--- if current.next.next then current.next.next.prev = current end
--- c.prev = current.next
--- current.next.next = c
--- end
--- if stop == current then break end
--- current = current.next
--- end
---
--- if nbsp[base.char] then
--- head = node.remove(head, base)
--- node.free(base)
--- end
---
--- return head, stop
--- end
---
--- function fonts.analyzers.methods.deva(head,font,attr)
--- local orighead = head
--- local current, start, done = head, true, false
--- while current do
--- if current.id == glyph and current.subtype<256 and current.font == font then
--- done = true
--- local syllablestart, syllableend = current, nil
---
--- local c = current --Checking Stand Alone cluster (this behavior is copied from dev2)
--- if ra[c.char] and c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] and c.next.next and c.next.next.id == glyph and c.next.next.subtype<256 and c.next.next.font == font then c = c.next.next end
--- if nbsp[c.char] and ( not current.prev or current.prev.id ~= glyph or current.prev.subtype>=256 or current.prev.font ~= font or
--- ( not consonant[current.prev.char] and not independent_vowel[current.prev.char] and not dependent_vowel[current.prev.char] and
--- not vowel_modifier[current.prev.char] and not stress_tone_mark[current.prev.char] and not nukta[current.prev.char] and not halant[current.prev.char] )
--- ) then --Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- local n = c.next
--- if n and n.id == glyph and n.subtype<256 and n.font == font then
--- local ni = n.next
--- if ( zwj[n.char] or zwnj[n.char] ) and ni and ni.id == glyph and ni.subtype<256 and ni.font == font then n = ni ni = ni.next end
--- if halant[n.char] and ni and ni.id == glyph and ni.subtype<256 and ni.font == font and consonant[ni.char] then c = ni end
--- end
--- while c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] do c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- current = c.next
--- syllableend = c
--- if syllablestart ~= syllableend then
--- head, current = deva_reorder(head, syllablestart,syllableend,font,attr)
--- current = current.next
--- end
--- elseif consonant[current.char] then -- syllable containing consonant
--- prevc = true
--- while prevc do
--- prevc = false
--- local n = current.next
--- if n and n.id == glyph and n.subtype<256 and n.font == font and nukta[n.char] then n = n.next end
--- if n and n.id == glyph and n.subtype<256 and n.font == font and halant[n.char] then
--- local n = n.next
--- if n and n.id == glyph and n.subtype<256 and n.font == font and ( zwj[n.char] or zwnj[n.char] ) then n = n.next end
--- if n and n.id == glyph and n.subtype<256 and n.font == font and consonant[n.char] then
--- prevc = true
--- current = n
--- end
--- end
--- end
--- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == font and nukta[current.next.char] then current = current.next end -- nukta (not specified in Microsoft Devanagari OpenType specification)
--- syllableend = current
--- current = current.next
--- if current and current.id == glyph and current.subtype<256 and current.font == font and halant[current.char] then -- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H
--- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == font and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
--- syllableend = current
--- current = current.next
--- else -- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM]
--- if current and current.id == glyph and current.subtype<256 and current.font == font and dependent_vowel[current.char] then
--- syllableend = current
--- current = current.next
--- end
--- if current and current.id == glyph and current.subtype<256 and current.font == font and vowel_modifier[current.char] then
--- syllableend = current
--- current = current.next
--- end
--- if current and current.id == glyph and current.subtype<256 and current.font == font and stress_tone_mark[current.char] then
--- syllableend = current
--- current = current.next
--- end
--- end
--- if syllablestart ~= syllableend then
--- head, current = deva_reorder(head,syllablestart,syllableend,font,attr)
--- current = current.next
--- end
--- elseif current.id == glyph and current.subtype<256 and current.font == font and independent_vowel[current.char] then -- syllable without consonants: VO + [VM] + [SM]
--- syllableend = current
--- current = current.next
--- if current and current.id == glyph and current.subtype<256 and current.font == font and vowel_modifier[current.char] then
--- syllableend = current
--- current = current.next
--- end
--- if current and current.id == glyph and current.subtype<256 and current.font == font and stress_tone_mark[current.char] then
--- syllableend = current
--- current = current.next
--- end
--- else -- Syntax error
--- if pre_mark[current.char] or above_mark[current.char] or below_mark[current.char] or post_mark[current.char] then
--- local n = node.copy(current)
--- if pre_mark[current.char] then
--- n.char = dotted_circle
--- else
--- current.char = dotted_circle
--- end
--- head, current = node.insert_after(head, current, n)
--- end
--- current = current.next
--- end
--- else
--- current = current.next
--- end
--- start = false
--- end
---
--- return head, done
--- end
---
--- function fonts.analyzers.methods.dev2(head,font,attr)
--- local current, start, done, syl_nr = head, true, false, 0
--- while current do
--- local syllablestart, syllableend = nil, nil
--- if current.id == glyph and current.subtype<256 and current.font == font then
--- syllablestart = current
--- done = true
--- local c, n = current, current.next
--- if ra[current.char] and n and n.id == glyph and n.subtype<256 and n.font == font and halant[n.char] and n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font then c = n.next end
--- if independent_vowel[c.char] then --Vowel-based syllable: [Ra+H]+V+[N]+[<[<ZWJ|ZWNJ>]+H+C|ZWJ+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
--- n = c.next
--- local ni, nii = nil, nil
--- if n and n.id == glyph and n.subtype<256 and n.font == font and nukta[n.char] then n = n.next end
--- if n and n.id == glyph and n.subtype<256 and n.font == font then ni = n.next end
--- if ni and ni.id == glyph and ni.subtype<256 and ni.font == font and ni.next and ni.next.id == glyph and ni.next.subtype<256 and ni.next.font == font then
--- nii = ni.next
--- if zwj[ni.char] and consonant[nii.char] then
--- c = nii
--- elseif (zwj[ni.char] or zwnj[ni.char]) and halant[nii.char] and nii.next and nii.next.id == glyph and nii.next.subtype<256 and nii.next.font == font and consonant[nii.next.char] then
--- c = nii.next
--- end
--- end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- current = c
--- syllableend = c
--- elseif nbsp[c.char] and ( not current.prev or current.prev.id ~= glyph or current.prev.subtype>=256 or current.prev.font ~= font or
--- ( not consonant[current.prev.char] and not independent_vowel[current.prev.char] and not dependent_vowel[current.prev.char] and
--- not vowel_modifier[current.prev.char] and not stress_tone_mark[current.prev.char] and not nukta[current.prev.char] and not halant[current.prev.char] )
--- ) then --Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- n = c.next
--- if n and n.id == glyph and n.subtype<256 and n.font == font then
--- local ni = n.next
--- if ( zwj[n.char] or zwnj[n.char] ) and ni and ni.id == glyph and ni.subtype<256 and ni.font == font then n = ni ni = ni.next end
--- if halant[n.char] and ni and ni.id == glyph and ni.subtype<256 and ni.font == font and consonant[ni.char] then c = ni end
--- end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- current = c
--- syllableend = c
--- elseif consonant[current.char] then --Consonant syllable: {C+[N]+<H+[<ZWNJ|ZWJ>]|<ZWNJ|ZWJ>+H>} + C+[N]+[A] + [< H+[<ZWNJ|ZWJ>] | {M}+[N]+[H]>]+[SM]+[(VD)]
--- c = current
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- n = c
--- while n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and ( halant[n.next.char] or zwnj[n.next.char] or zwj[n.next.char] ) do
--- if halant[n.next.char] then
--- n = n.next
--- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and ( zwnj[n.next.char] or zwj[n.next.char] ) then n = n.next end
--- else
--- if n.next.next and n.next.next.id == glyph and n.next.next.subtype<256 and n.next.next.font == font and halant[n.next.next.char] then n = n.next.next end
--- end
--- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and consonant[n.next.char] then
--- n = n.next
--- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and nukta[n.next.char] then n = n.next end
--- c = n
--- else
--- break
--- end
--- end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and anudatta[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then
--- c = c.next
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and ( zwnj[c.next.char] or zwj[c.next.char] ) then c = c.next end
--- else
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
--- end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- current = c
--- syllableend = c
--- end
--- end
---
--- if syllableend then
--- syl_nr = syl_nr + 1
--- c = syllablestart
--- while c ~= syllableend.next do
--- set_attribute(c,sylnr,syl_nr)
--- c = c.next
--- end
--- end
--- if syllableend and syllablestart ~= syllableend then
--- head, current = dev2_reorder(head,syllablestart,syllableend,font,attr)
--- end
---
--- if not syllableend and not has_attribute(current, state) and current.id == glyph and current.subtype<256 and current.font == font then -- Syntax error
--- if pre_mark[current.char] or above_mark[current.char] or below_mark[current.char] or post_mark[current.char] then
--- local n = node.copy(current)
--- if pre_mark[current.char] then
--- n.char = dotted_circle
--- else
--- current.char = dotted_circle
--- end
--- head, current = node.insert_after(head, current, n)
--- end
--- end
---
--- start = false
--- current = current.next
--- end
---
--- return head, done
--- end
---
--- function otf.handlers.dev2_reorder_matras(start,kind,lookupname,replacement)
--- return dev2_reorder_matras(start,kind,lookupname,replacement)
--- end
---
--- function otf.handlers.dev2_reorder_reph(start,kind,lookupname,replacement)
--- return dev2_reorder_reph(start,kind,lookupname,replacement)
--- end
---
--- function otf.handlers.dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement)
--- return dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement)
--- end
---
--- function otf.handlers.remove_joiners(start,kind,lookupname,replacement)
--- return remove_joiners(start,kind,lookupname,replacement)
--- end
diff --git a/tex/context/base/mkiv/font-odv.lua b/tex/context/base/mkiv/font-odv.lua
deleted file mode 100644
index 345b17a52..000000000
--- a/tex/context/base/mkiv/font-odv.lua
+++ /dev/null
@@ -1,2380 +0,0 @@
-if not modules then modules = { } end modules ['font-odv'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Kai Eigner, TAT Zetwerk / Hans Hagen, PRAGMA ADE",
- copyright = "TAT Zetwerk / PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- One day I'll speed this up ... char swapping and properties.
-
--- A few remarks:
---
--- This code is a partial rewrite of the code that deals with devanagari. The data and logic
--- are by Kai Eigner and based on Microsoft's OpenType specifications for specific
--- scripts, but with a few improvements. More information can be found at:
---
--- deva: http://www.microsoft.com/typography/OpenType%20Dev/devanagari/introO.mspx
--- dev2: http://www.microsoft.com/typography/OpenType%20Dev/devanagari/intro.mspx
---
--- Rajeesh Nambiar provided patches for the malayalam variant. Thanks to feedback from
--- the mailing list some aspects could be improved.
---
--- As I touched nearly all code, reshuffled it, optimized a lot, etc. etc. (imagine how
--- much can get messed up in over a week of work) it could be that I introduced bugs. There
--- is more to gain (esp in the functions applied to a range) but I'll do that when
--- everything works as expected. Kai's original code is kept in font-odk.lua as a reference
--- so blame me (HH) for bugs.
---
--- It is interesting that Kai managed to write this on top of the existing otf handler. Only a
--- few extensions were needed, like a few more analyzing states and dealing with changed
--- head nodes in the core scanner as that only happens here. There's a lot going on here
--- and it's only because I touched nearly all code that I got a bit of a picture of what
--- happens. For in-depth knowledge one needs to consult Kai.
---
--- The rewrite mostly deals with efficiency, both in terms of speed and code. We also made
--- sure that it suits generic use as well as use in ConTeXt. I removed some buglets but could
--- as well have messed up the logic by doing this. For this we keep the original around
--- as a reference. Due to all the reshuffling of glyphs quite some leaks
--- occur(red) but once I'm satisfied with the rewrite I'll weed them. I also integrated
--- initialization etc into the regular mechanisms.
---
--- In the meantime, we're down from 25.5-3.5=22 seconds to 17.7-3.5=14.2 seconds for a 100
--- page sample (mid 2012) with both variants so it's worth the effort. Some more speedup is
--- to be expected. Due to the method chosen it will never be real fast. If I ever become a
--- power user I'll have a go at some further speed up. I will rename some functions (and
--- features) once we don't need to check the original code. We now use a special subset
--- sequence for use inside the analyzer (after all we could can store this in the dataset
--- and save redundant analysis).
---
--- I might go for an array approach with respect to attributes (and reshuffling). Easier.
---
--- Some data will move to char-def.lua (some day).
---
--- Hans Hagen, PRAGMA-ADE, Hasselt NL
---
--- We could have c_nukta, c_halant, c_ra if we know that they are never used mixed within
--- one script .. yes or no?
---
--- Matras: according to Microsoft typography specifications "up to one of each type:
--- pre-, above-, below- or post- base", but that does not seem to be right. It could
--- become an option.
---
--- The next code looks weird anyway: the "and boolean" should move inside the if
--- or we should check differently (case vs successive).
---
--- local function ms_matra(c)
--- local prebase, abovebase, belowbase, postbase = true, true, true, true
--- local n = getnext(c)
--- while n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font do
--- local char = getchar(n)
--- if not dependent_vowel[char] then
--- break
--- elseif pre_mark[char] and prebase then
--- prebase = false
--- elseif above_mark[char] and abovebase then
--- abovebase = false
--- elseif below_mark[char] and belowbase then
--- belowbase = false
--- elseif post_mark[char] and postbase then
--- postbase = false
--- else
--- return c
--- end
--- c = getnext(c)
--- end
--- return c
--- end
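---
--- A sketch of the 'successive' check hinted at above (illustration only, never
--- used): scan matras in class order pre < above < below < post and allow each
--- class at most once; anything repeated or out of order ends the scan. The
--- helper names are made up, the category tables are the ones defined further
--- on in this file, and the font test of the example above is left out.
---
--- local function matra_rank(char)
---     if pre_mark  [char] then return 1 end
---     if above_mark[char] then return 2 end
---     if below_mark[char] then return 3 end
---     if post_mark [char] then return 4 end
--- end
---
--- local function successive_matra(c)
---     local last = 0
---     local n = getnext(c)
---     while n and getid(n) == glyph_code and getsubtype(n) < 256 do
---         local char = getchar(n)
---         if not dependent_vowel[char] then
---             break
---         end
---         local r = matra_rank(char)
---         if not r or r <= last then
---             return c -- repeated class or wrong order: stop here
---         end
---         last = r
---         c = n
---         n = getnext(n)
---     end
---     return c
--- end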
-
--- todo: first test for font then for subtype
-
-local insert, imerge = table.insert, table.imerge
-local next = next
-
-local report_devanagari = logs.reporter("otf","devanagari")
-
-fonts = fonts or { }
-fonts.analyzers = fonts.analyzers or { }
-fonts.analyzers.methods = fonts.analyzers.methods or { node = { otf = { } } }
-
-local otf = fonts.handlers.otf
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-
-local handlers = otf.handlers
-local methods = fonts.analyzers.methods
-
-local otffeatures = fonts.constructors.features.otf
-local registerotffeature = otffeatures.register
-
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getnext = nuts.getnext
-local setnext = nuts.setnext
-local getprev = nuts.getprev
-local setprev = nuts.setprev
-local getid = nuts.getid
-local getchar = nuts.getchar
-local setchar = nuts.setchar
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getprop = nuts.getprop
-local setprop = nuts.setprop
-
-local insert_node_after = nuts.insert_after
-local copy_node = nuts.copy
-local remove_node = nuts.remove
-local flush_list = nuts.flush_list
-local flush_node = nuts.flush_node
-
-local copyinjection = nodes.injections.copy -- KE: is this necessary? HH: probably not as positioning comes later and we rawget/set
-
-local unsetvalue = attributes.unsetvalue
-
-local fontdata = fonts.hashes.identifiers
-
-local a_state = attributes.private('state')
-local a_syllabe = attributes.private('syllabe')
-
-local dotted_circle = 0x25CC
-
-local states = fonts.analyzers.states -- not features
-
-local s_rphf = states.rphf
-local s_half = states.half
-local s_pref = states.pref
-local s_blwf = states.blwf
-local s_pstf = states.pstf
-
-local replace_all_nbsp = nil
-
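--- The next two definitions rebind themselves on their first call: at load time the
--- typesetters namespace and the preferred node handler may not be available yet, so
--- the real function is looked up lazily and then cached in the local variable.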
-replace_all_nbsp = function(head) -- delayed definition
- replace_all_nbsp = typesetters and typesetters.characters and typesetters.characters.replacenbspaces or function(head)
- return head
- end
- return replace_all_nbsp(head)
-end
-
-local xprocesscharacters = nil
-
-if context then
- xprocesscharacters = function(head,font)
- xprocesscharacters = nodes.handlers.characters
- return xprocesscharacters(head,font)
- end
-else
- xprocesscharacters = function(head,font)
- xprocesscharacters = nodes.handlers.nodepass -- generic
- return xprocesscharacters(head,font)
- end
-end
-
-local function processcharacters(head,font)
- return tonut(xprocesscharacters(tonode(head)))
-end
-
--- local fontprocesses = fonts.hashes.processes
---
--- function processcharacters(head,font)
--- local processors = fontprocesses[font]
--- for i=1,#processors do
--- head = processors[i](head,font,0)
--- end
--- return head, true
--- end
-
--- In due time there will be entries here for scripts like Bengali, Gujarati,
--- Gurmukhi, Kannada, Malayalam, Oriya, Tamil, Telugu. Feel free to provide the
--- code points.
-
--- We can assume that scripts are not mixed in the source, but if they are we might
--- need to have consonants etc. per script and initialize a local table
--- pointing to the right one.
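---
--- A sketch of such a per-script dispatch (the split tables named here are
--- hypothetical placeholders; currently one shared set of tables serves all scripts):
---
--- local consonant_deva = { } -- hypothetical split of the shared 'consonant' table
--- local consonant_mlym = { }
---
--- local consonant_by_script = {
---     deva = consonant_deva,
---     mlym = consonant_mlym,
--- }
---
--- local function consonants_for(script)
---     return consonant_by_script[script] or consonant
--- end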
-
--- new, to be checked:
---
--- U+00978 : DEVANAGARI LETTER MARWARI DDA
--- U+00980 : BENGALI ANJI
--- U+00C00 : TELUGU SIGN COMBINING CANDRABINDU ABOVE
--- U+00C34 : TELUGU LETTER LLLA
--- U+00C81 : KANNADA SIGN CANDRABINDU
--- U+00D01 : MALAYALAM SIGN CANDRABINDU
--- U+00DE6 : SINHALA LITH DIGIT ZERO
--- U+00DE7 : SINHALA LITH DIGIT ONE
--- U+00DE8 : SINHALA LITH DIGIT TWO
--- U+00DE9 : SINHALA LITH DIGIT THREE
--- U+00DEA : SINHALA LITH DIGIT FOUR
--- U+00DEB : SINHALA LITH DIGIT FIVE
--- U+00DEC : SINHALA LITH DIGIT SIX
--- U+00DED : SINHALA LITH DIGIT SEVEN
--- U+00DEE : SINHALA LITH DIGIT EIGHT
--- U+00DEF : SINHALA LITH DIGIT NINE
-
-local consonant = {
- -- devanagari
- [0x0915] = true, [0x0916] = true, [0x0917] = true, [0x0918] = true,
- [0x0919] = true, [0x091A] = true, [0x091B] = true, [0x091C] = true,
- [0x091D] = true, [0x091E] = true, [0x091F] = true, [0x0920] = true,
- [0x0921] = true, [0x0922] = true, [0x0923] = true, [0x0924] = true,
- [0x0925] = true, [0x0926] = true, [0x0927] = true, [0x0928] = true,
- [0x0929] = true, [0x092A] = true, [0x092B] = true, [0x092C] = true,
- [0x092D] = true, [0x092E] = true, [0x092F] = true, [0x0930] = true,
- [0x0931] = true, [0x0932] = true, [0x0933] = true, [0x0934] = true,
- [0x0935] = true, [0x0936] = true, [0x0937] = true, [0x0938] = true,
- [0x0939] = true, [0x0958] = true, [0x0959] = true, [0x095A] = true,
- [0x095B] = true, [0x095C] = true, [0x095D] = true, [0x095E] = true,
- [0x095F] = true, [0x0979] = true, [0x097A] = true,
- -- kannada
- [0x0C95] = true, [0x0C96] = true, [0x0C97] = true, [0x0C98] = true,
- [0x0C99] = true, [0x0C9A] = true, [0x0C9B] = true, [0x0C9C] = true,
- [0x0C9D] = true, [0x0C9E] = true, [0x0C9F] = true, [0x0CA0] = true,
- [0x0CA1] = true, [0x0CA2] = true, [0x0CA3] = true, [0x0CA4] = true,
- [0x0CA5] = true, [0x0CA6] = true, [0x0CA7] = true, [0x0CA8] = true,
- [0x0CA9] = true, [0x0CAA] = true, [0x0CAB] = true, [0x0CAC] = true,
- [0x0CAD] = true, [0x0CAE] = true, [0x0CAF] = true, [0x0CB0] = true,
- [0x0CB1] = true, [0x0CB2] = true, [0x0CB3] = true, [0x0CB4] = true,
- [0x0CB5] = true, [0x0CB6] = true, [0x0CB7] = true, [0x0CB8] = true,
- [0x0CB9] = true,
- [0x0CDE] = true, -- obsolete
- -- malayalam
- [0x0D15] = true, [0x0D16] = true, [0x0D17] = true, [0x0D18] = true,
- [0x0D19] = true, [0x0D1A] = true, [0x0D1B] = true, [0x0D1C] = true,
- [0x0D1D] = true, [0x0D1E] = true, [0x0D1F] = true, [0x0D20] = true,
- [0x0D21] = true, [0x0D22] = true, [0x0D23] = true, [0x0D24] = true,
- [0x0D25] = true, [0x0D26] = true, [0x0D27] = true, [0x0D28] = true,
- [0x0D29] = true, [0x0D2A] = true, [0x0D2B] = true, [0x0D2C] = true,
- [0x0D2D] = true, [0x0D2E] = true, [0x0D2F] = true, [0x0D30] = true,
- [0x0D31] = true, [0x0D32] = true, [0x0D33] = true, [0x0D34] = true,
- [0x0D35] = true, [0x0D36] = true, [0x0D37] = true, [0x0D38] = true,
- [0x0D39] = true, [0x0D3A] = true,
-}
-
-local independent_vowel = {
- -- devanagari
- [0x0904] = true, [0x0905] = true, [0x0906] = true, [0x0907] = true,
- [0x0908] = true, [0x0909] = true, [0x090A] = true, [0x090B] = true,
- [0x090C] = true, [0x090D] = true, [0x090E] = true, [0x090F] = true,
- [0x0910] = true, [0x0911] = true, [0x0912] = true, [0x0913] = true,
- [0x0914] = true, [0x0960] = true, [0x0961] = true, [0x0972] = true,
- [0x0973] = true, [0x0974] = true, [0x0975] = true, [0x0976] = true,
- [0x0977] = true,
- -- kannada
- [0x0C85] = true, [0x0C86] = true, [0x0C87] = true, [0x0C88] = true,
- [0x0C89] = true, [0x0C8A] = true, [0x0C8B] = true, [0x0C8C] = true,
- [0x0C8D] = true, [0x0C8E] = true, [0x0C8F] = true, [0x0C90] = true,
- [0x0C91] = true, [0x0C92] = true, [0x0C93] = true, [0x0C94] = true,
- -- malayalam
- [0x0D05] = true, [0x0D06] = true, [0x0D07] = true, [0x0D08] = true,
- [0x0D09] = true, [0x0D0A] = true, [0x0D0B] = true, [0x0D0C] = true,
- [0x0D0E] = true, [0x0D0F] = true, [0x0D10] = true, [0x0D12] = true,
- [0x0D13] = true, [0x0D14] = true,
-}
-
-local dependent_vowel = { -- matra
- -- devanagari
- [0x093A] = true, [0x093B] = true, [0x093E] = true, [0x093F] = true,
- [0x0940] = true, [0x0941] = true, [0x0942] = true, [0x0943] = true,
- [0x0944] = true, [0x0945] = true, [0x0946] = true, [0x0947] = true,
- [0x0948] = true, [0x0949] = true, [0x094A] = true, [0x094B] = true,
- [0x094C] = true, [0x094E] = true, [0x094F] = true, [0x0955] = true,
- [0x0956] = true, [0x0957] = true, [0x0962] = true, [0x0963] = true,
- -- kannada
- [0x0CBE] = true, [0x0CBF] = true, [0x0CC0] = true, [0x0CC1] = true,
- [0x0CC2] = true, [0x0CC3] = true, [0x0CC4] = true, [0x0CC5] = true,
- [0x0CC6] = true, [0x0CC7] = true, [0x0CC8] = true, [0x0CC9] = true,
- [0x0CCA] = true, [0x0CCB] = true, [0x0CCC] = true,
- -- malayalam
- [0x0D3E] = true, [0x0D3F] = true, [0x0D40] = true, [0x0D41] = true,
- [0x0D42] = true, [0x0D43] = true, [0x0D44] = true, [0x0D46] = true,
- [0x0D47] = true, [0x0D48] = true, [0x0D4A] = true, [0x0D4B] = true,
- [0x0D4C] = true, [0x0D57] = true,
-}
-
-local vowel_modifier = {
- -- devanagari
- [0x0900] = true, [0x0901] = true, [0x0902] = true, [0x0903] = true,
- -- A8E0 - A8F1 are cantillation marks for the Samaveda and may not belong here.
- [0xA8E0] = true, [0xA8E1] = true, [0xA8E2] = true, [0xA8E3] = true,
- [0xA8E4] = true, [0xA8E5] = true, [0xA8E6] = true, [0xA8E7] = true,
- [0xA8E8] = true, [0xA8E9] = true, [0xA8EA] = true, [0xA8EB] = true,
- [0xA8EC] = true, [0xA8ED] = true, [0xA8EE] = true, [0xA8EF] = true,
- [0xA8F0] = true, [0xA8F1] = true,
- -- malayalam
- [0x0D02] = true, [0x0D03] = true,
-}
-
-local stress_tone_mark = {
- [0x0951] = true, [0x0952] = true, [0x0953] = true, [0x0954] = true,
- -- kannada
- [0x0CCD] = true,
- -- malayalam
- [0x0D4D] = true,
-}
-
-local nukta = {
- -- devanagari
- [0x093C] = true,
- -- kannada:
- [0x0CBC] = true,
-}
-
-local halant = {
- -- devanagari
- [0x094D] = true,
- -- kannada
- [0x0CCD] = true,
- -- malayalam
- [0x0D4D] = true,
-}
-
-local ra = {
- -- devanagari
- [0x0930] = true,
- -- kannada
- [0x0CB0] = true,
- -- malayalam
- [0x0D30] = true,
-}
-
-local c_anudatta = 0x0952 -- used to be tables
-local c_nbsp = 0x00A0 -- used to be tables
-local c_zwnj = 0x200C -- used to be tables
-local c_zwj = 0x200D -- used to be tables
-
-local zw_char = { -- could also be inlined
- [0x200C] = true,
- [0x200D] = true,
-}
-
--- 0C82 anusvara
--- 0C83 visarga
--- 0CBD avagraha
--- 0CD5 length mark
--- 0CD6 ai length mark
--- 0CE0 letter rr
--- 0CE1 letter ll
--- 0CE2 vowel sign l
--- 0CE3 vowel sign ll
--- 0CF1 sign
--- 0CF2 sign
--- 0CE6 - 0CEF digits
-
-local pre_mark = {
- [0x093F] = true, [0x094E] = true,
- -- malayalam
- [0x0D46] = true, [0x0D47] = true, [0x0D48] = true,
-}
-
-local above_mark = {
- [0x0900] = true, [0x0901] = true, [0x0902] = true, [0x093A] = true,
- [0x0945] = true, [0x0946] = true, [0x0947] = true, [0x0948] = true,
- [0x0951] = true, [0x0953] = true, [0x0954] = true, [0x0955] = true,
- [0xA8E0] = true, [0xA8E1] = true, [0xA8E2] = true, [0xA8E3] = true,
- [0xA8E4] = true, [0xA8E5] = true, [0xA8E6] = true, [0xA8E7] = true,
- [0xA8E8] = true, [0xA8E9] = true, [0xA8EA] = true, [0xA8EB] = true,
- [0xA8EC] = true, [0xA8ED] = true, [0xA8EE] = true, [0xA8EF] = true,
- [0xA8F0] = true, [0xA8F1] = true,
- -- malayalam
- [0x0D4E] = true,
-}
-
-local below_mark = {
- [0x093C] = true, [0x0941] = true, [0x0942] = true, [0x0943] = true,
- [0x0944] = true, [0x094D] = true, [0x0952] = true, [0x0956] = true,
- [0x0957] = true, [0x0962] = true, [0x0963] = true,
-}
-
-local post_mark = {
- [0x0903] = true, [0x093B] = true, [0x093E] = true, [0x0940] = true,
- [0x0949] = true, [0x094A] = true, [0x094B] = true, [0x094C] = true,
- [0x094F] = true,
-}
-
-local twopart_mark = {
- -- malayalam
- [0x0D4A] = { 0x0D46, 0x0D3E, }, -- ൊ
- [0x0D4B] = { 0x0D47, 0x0D3E, }, -- ോ
- [0x0D4C] = { 0x0D46, 0x0D57, }, -- ൌ
-}
-
-local mark_four = { } -- As we access these frequently an extra hash is used.
-
-for k, v in next, pre_mark do mark_four[k] = pre_mark end
-for k, v in next, above_mark do mark_four[k] = above_mark end
-for k, v in next, below_mark do mark_four[k] = below_mark end
-for k, v in next, post_mark do mark_four[k] = post_mark end
-
-local mark_above_below_post = { }
-
-for k, v in next, above_mark do mark_above_below_post[k] = above_mark end
-for k, v in next, below_mark do mark_above_below_post[k] = below_mark end
-for k, v in next, post_mark do mark_above_below_post[k] = post_mark end
-
--- Again, this table can be extended for scripts other than devanagari. Actually,
--- for ConTeXt this kind of data is kept elsewhere so eventually we might move
--- tables to someplace else.
-
-local reorder_class = {
- -- devanagari
- [0x0930] = "before postscript",
- [0x093F] = "before half",
- [0x0940] = "after subscript",
- [0x0941] = "after subscript",
- [0x0942] = "after subscript",
- [0x0943] = "after subscript",
- [0x0944] = "after subscript",
- [0x0945] = "after subscript",
- [0x0946] = "after subscript",
- [0x0947] = "after subscript",
- [0x0948] = "after subscript",
- [0x0949] = "after subscript",
- [0x094A] = "after subscript",
- [0x094B] = "after subscript",
- [0x094C] = "after subscript",
- [0x0962] = "after subscript",
- [0x0963] = "after subscript",
- [0x093E] = "after subscript",
- -- kannada:
- [0x0CB0] = "after postscript", -- todo in code below
- [0x0CBF] = "before subscript", -- todo in code below
- [0x0CC6] = "before subscript", -- todo in code below
- [0x0CCC] = "before subscript", -- todo in code below
- [0x0CBE] = "before subscript", -- todo in code below
- [0x0CE2] = "before subscript", -- todo in code below
- [0x0CE3] = "before subscript", -- todo in code below
- [0x0CC1] = "before subscript", -- todo in code below
- [0x0CC2] = "before subscript", -- todo in code below
- [0x0CC3] = "after subscript",
- [0x0CC4] = "after subscript",
- [0x0CD5] = "after subscript",
- [0x0CD6] = "after subscript",
- -- malayalam
-}
-
--- We use some pseudo features as we need to manipulate the nodelist based
--- on information in the font as well as already applied features.
-
-local dflt_true = {
- dflt = true
-}
-
-local dev2_defaults = {
- dev2 = dflt_true,
-}
-
-local deva_defaults = {
- dev2 = dflt_true,
- deva = dflt_true,
-}
-
-local false_flags = { false, false, false, false }
-
-local both_joiners_true = {
- [0x200C] = true,
- [0x200D] = true,
-}
-
-local sequence_reorder_matras = {
- chain = 0,
- features = { dv01 = dev2_defaults },
- flags = false_flags,
- name = "dv01_reorder_matras",
- order = { "dv01" },
- subtables = { "dv01_reorder_matras" },
- type = "devanagari_reorder_matras",
-}
-
-local sequence_reorder_reph = {
- chain = 0,
- features = { dv02 = dev2_defaults },
- flags = false_flags,
- name = "dv02_reorder_reph",
- order = { "dv02" },
- subtables = { "dv02_reorder_reph" },
- type = "devanagari_reorder_reph",
-}
-
-local sequence_reorder_pre_base_reordering_consonants = {
- chain = 0,
- features = { dv03 = dev2_defaults },
- flags = false_flags,
- name = "dv03_reorder_pre_base_reordering_consonants",
- order = { "dv03" },
- subtables = { "dv03_reorder_pre_base_reordering_consonants" },
- type = "devanagari_reorder_pre_base_reordering_consonants",
-}
-
-local sequence_remove_joiners = {
- chain = 0,
- features = { dv04 = deva_defaults },
- flags = false_flags,
- name = "dv04_remove_joiners",
- order = { "dv04" },
- subtables = { "dv04_remove_joiners" },
- type = "devanagari_remove_joiners",
-}
-
--- Looping over features is twice as efficient as looping over basic forms (some
--- 350 checks instead of 750 for one font). This is something to keep an eye on
--- as it might depend on the font. Not that it's a bottleneck.
-
-local basic_shaping_forms = {
- nukt = true,
- akhn = true,
- rphf = true,
- pref = true,
- rkrf = true,
- blwf = true,
- half = true,
- pstf = true,
- vatu = true,
- cjct = true,
-}
-
-local function initializedevanagi(tfmdata)
- local script, language = otf.scriptandlanguage(tfmdata,attr) -- todo: take fast variant
- if script == "deva" or script == "dev2" or script =="mlym" or script == "mlm2" then
- local resources = tfmdata.resources
- local lookuphash = resources.lookuphash
- if not lookuphash["dv01"] then
- report_devanagari("adding devanagari features to font")
- --
- local features = resources.features
- local gsubfeatures = features.gsub
- local sequences = resources.sequences
- local sharedfeatures = tfmdata.shared.features
- --
- local lastmatch = 0
- for s=1,#sequences do -- find the last sequence that provides a basic shaping form
- local features = sequences[s].features
- if features then
- for k, v in next, features do
- if basic_shaping_forms[k] then
- lastmatch = s
- end
- end
- end
- end
- local insertindex = lastmatch + 1
- --
- lookuphash["dv04_remove_joiners"] = both_joiners_true
- --
- gsubfeatures["dv01"] = dev2_defaults -- reorder matras
- gsubfeatures["dv02"] = dev2_defaults -- reorder reph
- gsubfeatures["dv03"] = dev2_defaults -- reorder pre base reordering consonants
- gsubfeatures["dv04"] = deva_defaults -- remove joiners
- --
- insert(sequences,insertindex,sequence_reorder_pre_base_reordering_consonants)
- insert(sequences,insertindex,sequence_reorder_reph)
- insert(sequences,insertindex,sequence_reorder_matras)
- insert(sequences,insertindex,sequence_remove_joiners)
- --
- if script == "deva" then
- sharedfeatures["dv04"] = true -- dv04_remove_joiners
- elseif script == "dev2" then
- sharedfeatures["dv01"] = true -- dv01_reorder_matras
- sharedfeatures["dv02"] = true -- dv02_reorder_reph
- sharedfeatures["dv03"] = true -- dv03_reorder_pre_base_reordering_consonants
- sharedfeatures["dv04"] = true -- dv04_remove_joiners
- elseif script == "mlym" then
- sharedfeatures["pstf"] = true
- elseif script == "mlm2" then
- sharedfeatures["pstf"] = true
- sharedfeatures["pref"] = true
- sharedfeatures["dv03"] = true -- dv03_reorder_pre_base_reordering_consonants
- gsubfeatures["dv03"] = dev2_defaults -- reorder pre base reordering consonants
- insert(sequences,insertindex,sequence_reorder_pre_base_reordering_consonants)
- end
- end
- end
-end
-
-registerotffeature {
- name = "devanagari",
- description = "inject additional features",
- default = true,
- initializers = {
- node = initializedevanagi,
- },
-}
-
--- hm, this is applied to one character:
-
-local function deva_initialize(font,attr)
-
- local tfmdata = fontdata[font]
- local resources = tfmdata.resources
- local lookuphash = resources.lookuphash
-
- local datasets = otf.dataset(tfmdata,font,attr)
- local devanagaridata = datasets.devanagari
-
- if devanagaridata then -- maybe also check for e.g. reph
-
- return lookuphash, devanagaridata.reph, devanagaridata.vattu, devanagaridata.blwfcache
-
- else
-
- devanagaridata = { }
- datasets.devanagari = devanagaridata
-
- local reph = false
- local vattu = false
- local blwfcache = { }
-
- local sequences = resources.sequences
-
- for s=1,#sequences do -- triggers creation of dataset
- -- local sequence = sequences[s]
- local dataset = datasets[s]
- if dataset and dataset[1] then -- value
- local kind = dataset[4]
- if kind == "rphf" then
- -- deva
- reph = true
- elseif kind == "blwf" then
- -- deva
- vattu = true
- -- dev2
- local subtables = dataset[3].subtables
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- for k, v in next, lookupcache do
- blwfcache[k] = blwfcache[k] or v
- end
- end
- end
- end
- end
- end
-
- devanagaridata.reph = reph
- devanagaridata.vattu = vattu
- devanagaridata.blwfcache = blwfcache
-
- return lookuphash, reph, vattu, blwfcache
-
- end
-
-end
-
-local function deva_reorder(head,start,stop,font,attr,nbspaces)
-
- local lookuphash, reph, vattu, blwfcache = deva_initialize(font,attr) -- could be inlined but ugly
-
- local current = start
- local n = getnext(start)
- local base = nil
- local firstcons = nil
- local lastcons = nil
- local basefound = false
-
- if ra[getchar(start)] and halant[getchar(n)] and reph then
- -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph
- -- from candidates for base consonants
- if n == stop then
- return head, stop, nbspaces
- end
- if getchar(getnext(n)) == c_zwj then
- current = start
- else
- current = getnext(n)
- setprop(start,a_state,s_rphf)
- end
- end
-
- if getchar(current) == c_nbsp then
- -- Stand Alone cluster
- if current == stop then
- stop = getprev(stop)
- head = remove_node(head,current)
- flush_node(current)
- return head, stop, nbspaces
- else
- nbspaces = nbspaces + 1
- base = current
- firstcons = current
- lastcons = current
- current = getnext(current)
- if current ~= stop then
- if nukta[getchar(current)] then
- current = getnext(current)
- end
- if getchar(current) == c_zwj then
- if current ~= stop then
- local next = getnext(current)
- if next ~= stop and halant[getchar(next)] then
- current = next
- next = getnext(current)
- local tmp = next and getnext(next) or nil -- needs checking
- local changestop = next == stop
- local tempcurrent = copy_node(next)
- copyinjection(tempcurrent,next)
- local nextcurrent = copy_node(current)
- copyinjection(nextcurrent,current) -- KE: necessary? HH: probably not as positioning comes later and we rawget/set
- setnext(tempcurrent,nextcurrent)
- setprev(nextcurrent,tempcurrent)
- setprop(tempcurrent,a_state,s_blwf)
- tempcurrent = processcharacters(tempcurrent,font)
- setprop(tempcurrent,a_state,unsetvalue)
- if getchar(next) == getchar(tempcurrent) then
- flush_list(tempcurrent)
- local n = copy_node(current)
- copyinjection(n,current) -- KE: necessary? HH: probably not as positioning comes later and we rawget/set
- setchar(current,dotted_circle)
- head = insert_node_after(head, current, n)
- else
- setchar(current,getchar(tempcurrent)) -- we assume that the result of blwf consists of one node
- local freenode = getnext(current)
- setnext(current,tmp)
- if tmp then
- setprev(tmp,current)
- end
- flush_node(freenode)
- flush_list(tempcurrent)
- if changestop then
- stop = current
- end
- end
- end
- end
- end
- end
- end
- end
-
- while not basefound do
- -- find base consonant
- if consonant[getchar(current)] then
- setprop(current,a_state,s_half)
- if not firstcons then
- firstcons = current
- end
- lastcons = current
- if not base then
- base = current
- elseif blwfcache[getchar(current)] then
- -- consonant has below-base (or post-base) form
- setprop(current,a_state,s_blwf)
- else
- base = current
- end
- end
- basefound = current == stop
- current = getnext(current)
- end
-
- if base ~= lastcons then
- -- if base consonant is not last one then move halant from base consonant to last one
- local np = base
- local n = getnext(base)
- if nukta[getchar(n)] then
- np = n
- n = getnext(n)
- end
- if halant[getchar(n)] then
- if lastcons ~= stop then
- local ln = getnext(lastcons)
- if nukta[getchar(ln)] then
- lastcons = ln
- end
- end
- -- local np = getprev(n)
- local nn = getnext(n)
- local ln = getnext(lastcons) -- what if lastcons is nn ?
- setnext(np,nn)
- setprev(nn,np)
- setnext(lastcons,n)
- if ln then
- setprev(ln,n)
- end
- setnext(n,ln)
- setprev(n,lastcons)
- if lastcons == stop then
- stop = n
- end
- end
- end
-
- n = getnext(start)
- if n ~= stop and ra[getchar(start)] and halant[getchar(n)] and not zw_char[getchar(getnext(n))] then
- -- if syllable starts with Ra + H then move this combination so that it follows either:
- -- the post-base 'matra' (if any) or the base consonant
- local matra = base
- if base ~= stop then
- local next = getnext(base)
- if dependent_vowel[getchar(next)] then
- matra = next
- end
- end
- -- [sp][start][n][nn] [matra|base][?]
- -- [matra|base][start] [n][?] [sp][nn]
- local sp = getprev(start)
- local nn = getnext(n)
- local mn = getnext(matra)
- if sp then
- setnext(sp,nn)
- end
- setprev(nn,sp)
- setnext(matra,start)
- setprev(start,matra)
- setnext(n,mn)
- if mn then
- setprev(mn,n)
- end
- if head == start then
- head = nn
- end
- start = nn
- if matra == stop then
- stop = n
- end
- end
-
- local current = start
- while current ~= stop do
- local next = getnext(current)
- if next ~= stop and halant[getchar(next)] and getchar(getnext(next)) == c_zwnj then
- setprop(current,a_state,unsetvalue)
- end
- current = next
- end
-
- if base ~= stop and getprop(base,a_state) then
- local next = getnext(base)
- if halant[getchar(next)] and not (next ~= stop and getchar(getnext(next)) == c_zwj) then
- setprop(base,a_state,unsetvalue)
- end
- end
-
- -- ToDo: split two- or three-part matras into their parts. Then, move the left 'matra' part to the beginning of the syllable.
- -- Not necessary for Devanagari. However it is necessary for other scripts, such as Tamil (e.g. TAMIL VOWEL SIGN O - 0BCA)
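- -- A sketch (illustration only, not wired into this function) of such a split, using the
- -- twopart_mark table defined earlier: replace the two-part matra by its first component
- -- and insert the second component right after it; the left part can then be moved like
- -- a pre_mark.
- --
- -- local parts = twopart_mark[getchar(n)] -- n: the matra node being examined
- -- if parts then
- --     setchar(n,parts[1])                  -- left (pre) component
- --     local second = copy_node(n)
- --     setchar(second,parts[2])             -- right component
- --     head = insert_node_after(head,n,second)
- -- end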
-
- -- classify consonants and 'matra' parts as pre-base, above-base (Reph), below-base or post-base, and group elements of the syllable (consonants and 'matras') according to this classification
-
- local current, allreordered, moved = start, false, { [base] = true }
- local a, b, p, bn = base, base, base, getnext(base)
- if base ~= stop and nukta[getchar(bn)] then
- a, b, p = bn, bn, bn
- end
- while not allreordered do
- -- current is always consonant
- local c = current
- local n = getnext(current)
- local l = nil -- used ?
- if c ~= stop then
- if nukta[getchar(n)] then
- c = n
- n = getnext(n)
- end
- if c ~= stop then
- if halant[getchar(n)] then
- c = n
- n = getnext(n)
- end
- while c ~= stop and dependent_vowel[getchar(n)] do
- c = n
- n = getnext(n)
- end
- if c ~= stop then
- if vowel_modifier[getchar(n)] then
- c = n
- n = getnext(n)
- end
- if c ~= stop and stress_tone_mark[getchar(n)] then
- c = n
- n = getnext(n)
- end
- end
- end
- end
- local bp = getprev(firstcons)
- local cn = getnext(current)
- local last = getnext(c)
- while cn ~= last do
- -- move pre-base matras...
- if pre_mark[getchar(cn)] then
- if bp then
- setnext(bp,cn)
- end
- local next = getnext(cn)
- local prev = getprev(cn)
- if next then
- setprev(next,prev)
- end
- setnext(prev,next)
- if cn == stop then
- stop = prev
- end
- setprev(cn,bp)
- setnext(cn,firstcons)
- setprev(firstcons,cn)
- if firstcons == start then
- if head == start then
- head = cn
- end
- start = cn
- end
- break
- end
- cn = getnext(cn)
- end
- allreordered = c == stop
- current = getnext(c)
- end
-
- if reph or vattu then
- local current, cns = start, nil
- while current ~= stop do
- local c = current
- local n = getnext(current)
- if ra[getchar(current)] and halant[getchar(n)] then
- c = n
- n = getnext(n)
- local b, bn = base, base
- while bn ~= stop do
- local next = getnext(bn)
- if dependent_vowel[getchar(next)] then
- b = next
- end
- bn = next
- end
- if getprop(current,a_state) == s_rphf then
- -- position Reph (Ra + H) after post-base 'matra' (if any) since these
- -- become marks on the 'matra', not on the base glyph
- if b ~= current then
- if current == start then
- if head == start then
- head = n
- end
- start = n
- end
- if b == stop then
- stop = c
- end
- local prev = getprev(current)
- if prev then
- setnext(prev,n)
- end
- if n then
- setprev(n,prev)
- end
- local next = getnext(b)
- setnext(c,next)
- if next then
- setprev(next,c)
- end
- setnext(c,next)
- setnext(b,current)
- setprev(current,b)
- end
- elseif cns and getnext(cns) ~= current then -- todo: optimize next
- -- position below-base Ra (vattu) following the consonants on which it is placed (either the base consonant or one of the pre-base consonants)
- local cp, cnsn = getprev(current), getnext(cns)
- if cp then
- setnext(cp,n)
- end
- if n then
- setprev(n,cp)
- end
- setnext(cns,current)
- setprev(current,cns)
- setnext(c,cnsn)
- if cnsn then
- setprev(cnsn,c)
- end
- if c == stop then
- stop = cp
- break
- end
- current = getprev(n)
- end
- else
- local char = getchar(current)
- if consonant[char] then
- cns = current
- local next = getnext(cns)
- if halant[getchar(next)] then
- cns = next
- end
- elseif char == c_nbsp then
- nbspaces = nbspaces + 1
- cns = current
- local next = getnext(cns)
- if halant[getchar(next)] then
- cns = next
- end
- end
- end
- current = getnext(current)
- end
- end
-
- if getchar(base) == c_nbsp then
- nbspaces = nbspaces - 1
- head = remove_node(head,base)
- flush_node(base)
- end
-
- return head, stop, nbspaces
-end
-
--- If a pre-base matra character had been reordered before applying basic features,
--- the glyph can be moved closer to the main consonant based on whether half-forms had been formed.
--- Actual position for the matra is defined as “after last standalone halant glyph,
--- after initial matra position and before the main consonant”.
--- If ZWJ or ZWNJ follow this halant, position is moved after it.
-
--- so we break out ... this is only done for the first 'word' (if we feed words we can as
--- well test for non-glyph nodes).
-
-function handlers.devanagari_reorder_matras(head,start,kind,lookupname,replacement) -- no leak
- local current = start -- we could cache attributes here
- local startfont = getfont(start)
- local startattr = getprop(start,a_syllabe)
- -- can be fast loop
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do
- local next = getnext(current)
- if halant[getchar(current)] and not getprop(current,a_state) then
- if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getprop(next,a_syllabe) == startattr and zw_char[getchar(next)] then
- current = next
- end
- local startnext = getnext(start)
- head = remove_node(head,start)
- local next = getnext(current)
- if next then
- setprev(next,start)
- end
- setnext(start,next)
- setnext(current,start)
- setprev(start,current)
- start = startnext
- break
- end
- current = next
- end
- return head, start, true
-end
-
--- todo: way more caching of attributes and font
-
--- Reph’s original position is always at the beginning of the syllable (i.e. it is not reordered at the character reordering stage).
--- However, it will be reordered according to the basic-forms shaping results.
--- Possible positions for reph, depending on the script, are: after main, before post-base consonant forms,
--- and after post-base consonant forms.
-
--- 1 If reph should be positioned after post-base consonant forms, proceed to step 5.
--- 2 If the reph repositioning class is not after post-base: target position is after the first explicit halant glyph between
--- the first post-reph consonant and last main consonant. If ZWJ or ZWNJ follow this halant, position is moved after it.
--- If such position is found, this is the target position. Otherwise, proceed to the next step.
--- Note: in old-implementation fonts, where classifications were fixed in shaping engine,
--- there was no case where reph position will be found on this step.
--- 3 If reph should be repositioned after the main consonant: find the first consonant not ligated with main,
--- or the first consonant that is not a potential pre-base reordering Ra.
--- 4 If reph should be positioned before post-base consonant, find first post-base classified consonant not ligated with main.
--- If no consonant is found, the target position should be before the first matra, syllable modifier sign or vedic sign.
--- 5 If no consonant is found in steps 3 or 4, move reph to a position immediately before the first post-base matra,
--- syllable modifier sign or vedic sign that has a reordering class after the intended reph position.
--- For example, if the reordering position for reph is post-main, it will skip above-base matras that also have a post-main position.
--- 6 Otherwise, reorder reph to the end of the syllable.
-
--- hm, this only looks at the start of a nodelist ... is this supposed to be line based?
-
-function handlers.devanagari_reorder_reph(head,start,kind,lookupname,replacement)
- -- since in Devanagari reph has reordering position 'before post-base', dev2 only follows steps 2, 4, and 6,
- -- the other steps are still ToDo (required for scripts other than dev2)
- local current = getnext(start)
- local startnext = nil
- local startprev = nil
- local startfont = getfont(start)
- local startattr = getprop(start,a_syllabe)
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do --step 2
- if halant[getchar(current)] and not getprop(current,a_state) then
- local next = getnext(current)
- if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getprop(next,a_syllabe) == startattr and zw_char[getchar(next)] then
- current = next
- end
- startnext = getnext(start)
- head = remove_node(head,start)
- local next = getnext(current)
- if next then
- setprev(next,start)
- end
- setnext(start,next)
- setnext(current,start)
- setprev(start,current)
- start = startnext
- startattr = getprop(start,a_syllabe)
- break
- end
- current = getnext(current)
- end
- if not startnext then
- current = getnext(start)
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do --step 4
- if getprop(current,a_state) == s_pstf then --post-base
- startnext = getnext(start)
- head = remove_node(head,start)
- local prev = getprev(current)
- setprev(start,prev)
- setnext(prev,start)
- setnext(start,current)
- setprev(current,start)
- start = startnext
- startattr = getprop(start,a_syllabe)
- break
- end
- current = getnext(current)
- end
- end
- -- ToDo: determine position for reph with reordering position other than 'before post-base'
- -- (required for scripts other than dev2)
- -- leaks
- if not startnext then
- current = getnext(start)
- local c = nil
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do --step 5
- if not c then
- local char = getchar(current)
- -- todo: combine in one
- if mark_above_below_post[char] and reorder_class[char] ~= "after subscript" then
- c = current
- end
- end
- current = getnext(current)
- end
- -- here we can lose the old start node: maybe best split cases
- if c then
- startnext = getnext(start)
- head = remove_node(head,start)
- local prev = getprev(c)
- setprev(start,prev)
- setnext(prev,start)
- setnext(start,c)
- setprev(c,start)
- -- end
- start = startnext
- startattr = getprop(start,a_syllabe)
- end
- end
- -- leaks
- if not startnext then
- current = start
- local next = getnext(current)
- while next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getprop(next,a_syllabe) == startattr do --step 6
- current = next
- next = getnext(current)
- end
- if start ~= current then
- startnext = getnext(start)
- head = remove_node(head,start)
- local next = getnext(current)
- if next then
- setprev(next,start)
- end
- setnext(start,next)
- setnext(current,start)
- setprev(start,current)
- start = startnext
- end
- end
- --
- return head, start, true
-end
-
--- we can cache some checking (v)
-
--- If a pre-base reordering consonant is found, reorder it according to the following rules:
---
--- 1 Only reorder a glyph produced by substitution during application of the feature.
--- (Note that a font may shape a Ra consonant with the feature generally but block it in certain contexts.)
--- 2 Try to find a target position the same way as for pre-base matra. If it is found, reorder pre-base consonant glyph.
--- 3 If position is not found, reorder immediately before main consonant.
-
--- UNTESTED: NOT CALLED IN EXAMPLE
-
-function handlers.devanagari_reorder_pre_base_reordering_consonants(head,start,kind,lookupname,replacement)
- local current = start
- local startnext = nil
- local startprev = nil
- local startfont = getfont(start)
- local startattr = getprop(start,a_syllabe)
- -- can be fast for loop + caching state
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do
- local next = getnext(current)
- if halant[getchar(current)] and not getprop(current,a_state) then
- if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getprop(next,a_syllabe) == startattr then
- local char = getchar(next)
- if char == c_zwnj or char == c_zwj then
- current = next
- end
- end
- startnext = getnext(start)
- head = remove_node(head,start)
- local next = getnext(current)
- if next then
- setprev(next,start)
- end
- setnext(start,next)
- setnext(current,start)
- setprev(start,current)
- start = startnext
- break
- end
- current = next
- end
- if not startnext then
- current = getnext(start)
- startattr = getprop(start,a_syllabe)
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do
- if not consonant[getchar(current)] and getprop(current,a_state) then --main
- startnext = getnext(start)
- head = remove_node(head,start)
- local prev = getprev(current)
- setprev(start,prev)
- setnext(prev,start)
- setnext(start,current)
- setprev(current,start)
- start = startnext
- break
- end
- current = getnext(current)
- end
- end
- return head, start, true
-end
-
-function handlers.devanagari_remove_joiners(head,start,kind,lookupname,replacement)
- local stop = getnext(start)
- local startfont = getfont(start)
- while stop and getid(stop) == glyph_code and getsubtype(stop) < 256 and getfont(stop) == startfont do
- local char = getchar(stop)
- if char == c_zwnj or char == c_zwj then
- stop = getnext(stop)
- else
- break
- end
- end
- if stop then
- setnext(getprev(stop),nil)
- setprev(stop,getprev(start))
- end
- local prev = getprev(start)
- if prev then
- setnext(prev,stop)
- end
- if head == start then
- head = stop
- end
- flush_list(start)
- return head, stop, true
-end
-
-local valid = {
- akhn = true, -- malayalam
- rphf = true,
- pref = true,
- half = true,
- blwf = true,
- pstf = true,
- pres = true, -- malayalam
- blws = true, -- malayalam
- psts = true, -- malayalam
-}
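-
--- these are the standard OpenType Indic feature tags handled by the initializer
--- below: akhn (akhands), rphf (reph forms), pref (pre-base forms), half (half
--- forms), blwf (below-base forms), pstf (post-base forms), plus the pres, blws
--- and psts substitution features that malayalam also needs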
-
-local function dev2_initialize(font,attr)
-
- local tfmdata = fontdata[font]
- local resources = tfmdata.resources
- local lookuphash = resources.lookuphash
-
- local datasets = otf.dataset(tfmdata,font,attr)
- local devanagaridata = datasets.devanagari
-
- if devanagaridata then -- maybe also check for e.g. seqsubset
-
- return lookuphash, devanagaridata.seqsubset
-
- else
-
- devanagaridata = { }
- datasets.devanagari = devanagaridata
-
- local seqsubset = { }
- devanagaridata.seqsubset = seqsubset
-
- local sequences = resources.sequences
-
- for s=1,#sequences do
- -- local sequence = sequences[s]
- local dataset = datasets[s]
- if dataset and dataset[1] then -- featurevalue
- local kind = dataset[4]
- if kind and valid[kind] then
- -- could become a function call
- local subtables = dataset[3].subtables
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local reph = false
- local chain = dataset[3]
- if chain ~= 0 then -- rphf is the result of a chain
- -- rphf might be result of other handler/chainproc
- else
- -- rphf acts on consonant + halant
- for k, v in next, ra do
- local r = lookupcache[k]
- if r then
- local h = false
- for k, v in next, halant do
- local h = r[k]
- if h then
- reph = h.ligature or false
- break
- end
- end
- if reph then
- break
- end
- end
- end
- end
- seqsubset[#seqsubset+1] = { kind, lookupcache, reph }
- end
- end
- end
- end
- end
-
- lookuphash["dv01_reorder_matras"] = pre_mark -- move to initializer ?
-
- return lookuphash, seqsubset
-
- end
-
-end
-
--- this one will be merged into the caller: it saves a call, but we will then make functions
--- of the actions
-
-local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pass over (determine stop in sweep)
-
- local lookuphash, seqsubset = dev2_initialize(font,attr)
-
- local pre_base_reordering_consonants = { } -- was nil ... probably went unnoticed because never assigned
-
- local reph = false -- was nil ... probably went unnoticed because never assigned
- local halfpos = nil
- local basepos = nil
- local subpos = nil
- local postpos = nil
- local locl = { }
-
- for i=1,#seqsubset do
-
- -- maybe quit if start == stop
-
- local subset = seqsubset[i]
- local kind = subset[1]
- local lookupcache = subset[2]
- if kind == "rphf" then
- for k, v in next, ra do
- local r = lookupcache[k]
- if r then
- for k, v in next, halant do
- local h = r[k]
- if h then
- reph = h.ligature or false
- break
- end
- end
- if reph then
- break
- end
- end
- end
- local current = start
- local last = getnext(stop)
- while current ~= last do
- if current ~= stop then
- local c = locl[current] or getchar(current)
- local found = lookupcache[c]
- if found then
- local next = getnext(current)
- local n = locl[next] or getchar(next)
- if found[n] then --above-base: rphf Consonant + Halant
- local afternext = next ~= stop and getnext(next)
- if afternext and zw_char[getchar(afternext)] then -- ZWJ and ZWNJ prevent creation of reph
- current = next
- current = getnext(current)
- elseif current == start then
- setprop(current,a_state,s_rphf)
- current = next
- else
- current = next
- end
- end
- end
- end
- current = getnext(current)
- end
- elseif kind == "pref" then
- -- why not global? pretty inefficient this way
- -- this will move to the initializer and we will store the hash in dataset
- -- todo: reph might also be result of chain
- for k, v in next, halant do
- local h = lookupcache[k]
- if h then
- local found = false
- for k, v in next, h do
- found = v and v.ligature
- if found then
- pre_base_reordering_consonants[k] = found
- break
- end
- end
- if found then
- break
- end
- end
- end
- --
- local current = start
- local last = getnext(stop)
- while current ~= last do
- if current ~= stop then
- local c = locl[current] or getchar(current)
- local found = lookupcache[c]
- if found then
- local next = getnext(current)
- local n = locl[next] or getchar(next)
- if found[n] then
- setprop(current,a_state,s_pref)
- setprop(next,a_state,s_pref)
- current = next
- end
- end
- end
- current = getnext(current)
- end
- elseif kind == "half" then -- half forms: half / Consonant + Halant
- local current = start
- local last = getnext(stop)
- while current ~= last do
- if current ~= stop then
- local c = locl[current] or getchar(current)
- local found = lookupcache[c]
- if found then
- local next = getnext(current)
- local n = locl[next] or getchar(next)
- if found[n] then
- if next ~= stop and getchar(getnext(next)) == c_zwnj then -- zwnj prevents creation of half
- current = next
- else
- setprop(current,a_state,s_half)
- if not halfpos then
- halfpos = current
- end
- end
- current = getnext(current)
- end
- end
- end
- current = getnext(current)
- end
- elseif kind == "blwf" then -- below-base: blwf / Halant + Consonant
- local current = start
- local last = getnext(stop)
- while current ~= last do
- if current ~= stop then
- local c = locl[current] or getchar(current)
- local found = lookupcache[c]
- if found then
- local next = getnext(current)
- local n = locl[next] or getchar(next)
- if found[n] then
- setprop(current,a_state,s_blwf)
- setprop(next,a_state,s_blwf)
- current = next
- subpos = current
- end
- end
- end
- current = getnext(current)
- end
- elseif kind == "pstf" then -- post-base: pstf / Halant + Consonant
- local current = start
- local last = getnext(stop)
- while current ~= last do
- if current ~= stop then
- local c = locl[current] or getchar(current)
- local found = lookupcache[c]
- if found then
- local next = getnext(current)
- local n = locl[next] or getchar(next)
- if found[n] then
- setprop(current,a_state,s_pstf)
- setprop(next,a_state,s_pstf)
- current = next
- postpos = current
- end
- end
- end
- current = getnext(current)
- end
- end
- end
-
- -- this one changes per word
-
- lookuphash["dv02_reorder_reph"] = { [reph] = true }
- lookuphash["dv03_reorder_pre_base_reordering_consonants"] = pre_base_reordering_consonants
-
- local current, base, firstcons = start, nil, nil
-
- if getprop(start,a_state) == s_rphf then
- -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
- current = getnext(getnext(start))
- end
-
- if current ~= getnext(stop) and getchar(current) == c_nbsp then
- -- Stand Alone cluster
- if current == stop then
- stop = getprev(stop)
- head = remove_node(head,current)
- flush_node(current)
- return head, stop, nbspaces
- else
- nbspaces = nbspaces + 1
- base = current
- current = getnext(current)
- if current ~= stop then
- local char = getchar(current)
- if nukta[char] then
- current = getnext(current)
- char = getchar(current)
- end
- if char == c_zwj then
- local next = getnext(current)
- if current ~= stop and next ~= stop and halant[getchar(next)] then
- current = next
- next = getnext(current)
- local tmp = getnext(next)
- local changestop = next == stop
- setnext(next,nil)
- setprop(current,a_state,s_pref)
- current = processcharacters(current,font)
- setprop(current,a_state,s_blwf)
- current = processcharacters(current,font)
- setprop(current,a_state,s_pstf)
- current = processcharacters(current,font)
- setprop(current,a_state,unsetvalue)
- if halant[getchar(current)] then
- setnext(getnext(current),tmp)
- local nc = copy_node(current)
- copyinjection(nc,current)
- setchar(current,dotted_circle)
- head = insert_node_after(head,current,nc)
- else
- setnext(current,tmp) -- assumes that result of pref, blwf, or pstf consists of one node
- if changestop then
- stop = current
- end
- end
- end
- end
- end
- end
- else -- not Stand Alone cluster
- local last = getnext(stop)
- while current ~= last do -- find base consonant
- local next = getnext(current)
- if consonant[getchar(current)] then
- if not (current ~= stop and next ~= stop and halant[getchar(next)] and getchar(getnext(next)) == c_zwj) then
- if not firstcons then
- firstcons = current
- end
- -- check whether consonant has below-base or post-base form or is pre-base reordering Ra
- local a = getprop(current,a_state)
- if not (a == s_pref or a == s_blwf or a == s_pstf) then
- base = current
- end
- end
- end
- current = next
- end
- if not base then
- base = firstcons
- end
- end
-
- if not base then
- if getprop(start,a_state) == s_rphf then
- setprop(start,a_state,unsetvalue)
- end
- return head, stop, nbspaces
- else
- if getprop(base,a_state) then
- setprop(base,a_state,unsetvalue)
- end
- basepos = base
- end
- if not halfpos then
- halfpos = base
- end
- if not subpos then
- subpos = base
- end
- if not postpos then
- postpos = subpos or base
- end
-
- -- Matra characters are classified and reordered by which consonant in a conjunct they have affinity for
-
- local moved = { }
- local current = start
- local last = getnext(stop)
- while current ~= last do
- local char, target, cn = locl[current] or getchar(current), nil, getnext(current)
- -- not so efficient (needed for malayalam)
- local tpm = twopart_mark[char]
- if tpm then
- local extra = copy_node(current)
- copyinjection(extra,current)
- char = tpm[1]
- setchar(current,char)
- setchar(extra,tpm[2])
- head = insert_node_after(head,current,extra)
- end
- --
- if not moved[current] and dependent_vowel[char] then
- if pre_mark[char] then -- Before first half form in the syllable
- moved[current] = true
- local prev = getprev(current)
- local next = getnext(current)
- if prev then
- setnext(prev,next)
- end
- if next then
- setprev(next,prev)
- end
- if current == stop then
- stop = getprev(current)
- end
- if halfpos == start then
- if head == start then
- head = current
- end
- start = current
- end
- local prev = getprev(halfpos)
- if prev then
- setnext(prev,current)
- end
- setprev(current,prev)
- setprev(halfpos,current)
- setnext(current,halfpos)
- halfpos = current
- elseif above_mark[char] then -- After main consonant
- target = basepos
- if subpos == basepos then
- subpos = current
- end
- if postpos == basepos then
- postpos = current
- end
- basepos = current
- elseif below_mark[char] then -- After subjoined consonants
- target = subpos
- if postpos == subpos then
- postpos = current
- end
- subpos = current
- elseif post_mark[char] then -- After post-form consonant
- target = postpos
- postpos = current
- end
- if mark_above_below_post[char] then
- local prev = getprev(current)
- if prev ~= target then
- local next = getnext(current)
- if prev then -- not needed, already tested with target
- setnext(prev,next)
- end
- if next then
- setprev(next,prev)
- end
- if current == stop then
- stop = prev
- end
- local next = getnext(target)
- if next then
- setprev(next,current)
- end
- setnext(current,next)
- setnext(target,current)
- setprev(current,target)
- end
- end
- end
- current = cn
- end
-
- -- Reorder marks to canonical order: Adjacent nukta and halant or nukta and vedic sign are always repositioned if necessary, so that the nukta is first.
-
- local current, c = start, nil
- while current ~= stop do
- local char = getchar(current)
- if halant[char] or stress_tone_mark[char] then
- if not c then
- c = current
- end
- else
- c = nil
- end
- local next = getnext(current)
- if c and nukta[getchar(next)] then
- if head == c then
- head = next
- end
- if stop == next then
- stop = current
- end
- local prev = getprev(c)
- if prev then
- setnext(prev,next)
- end
- setprev(next,prev)
- local nextnext = getnext(next)
- setnext(current,nextnext)
- local nextnextnext = getnext(nextnext)
- if nextnextnext then
- setprev(nextnextnext,current)
- end
- setprev(c,nextnext)
- setnext(nextnext,c)
- end
- if stop == current then break end
- current = getnext(current)
- end
-
- if getchar(base) == c_nbsp then
- nbspaces = nbspaces - 1
- head = remove_node(head, base)
- flush_node(base)
- end
-
- return head, stop, nbspaces
-end
-
--- cleaned up and optimized ... needs checking (local, check order, fixes, extra hash, etc)
-
-local separator = { }
-
-imerge(separator,consonant)
-imerge(separator,independent_vowel)
-imerge(separator,dependent_vowel)
-imerge(separator,vowel_modifier)
-imerge(separator,stress_tone_mark)
-
-for k, v in next, nukta do separator[k] = true end
-for k, v in next, halant do separator[k] = true end
-
-local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowel
- -- why two variants ... the comment suggests that it's the same ruleset
- local n = getnext(c)
- if not n then
- return c
- end
- if variant == 1 then
- local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if v and nukta[getchar(n)] then
- n = getnext(n)
- if n then
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- end
- end
- if n and v then
- local nn = getnext(n)
- if nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font then
- local nnn = getnext(nn)
- if nnn and getid(nnn) == glyph_code and getsubtype(nnn) < 256 and getfont(nnn) == font then
- local nnc = getchar(nn)
- local nnnc = getchar(nnn)
- if nnc == c_zwj and consonant[nnnc] then
- c = nnn
- elseif (nnc == c_zwnj or nnc == c_zwj) and halant[nnnc] then
- local nnnn = getnext(nnn)
- if nnnn and getid(nnnn) == glyph_code and consonant[getchar(nnnn)] and getsubtype(nnnn) < 256 and getfont(nnnn) == font then
- c = nnnn
- end
- end
- end
- end
- end
- elseif variant == 2 then
- if getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
- c = n
- end
- n = getnext(c)
- if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
- local nn = getnext(n)
- if nn then
- local nv = getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font
- if nv and zw_char[getchar(n)] then
- n = nn
- nn = getnext(nn)
- nv = nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font
- end
- if nv and halant[getchar(n)] and consonant[getchar(nn)] then
- c = nn
- end
- end
- end
- end
- -- c = ms_matra(c)
- local n = getnext(c)
- if not n then
- return c
- end
- local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- local char = getchar(n)
- if dependent_vowel[char] then
- c = getnext(c)
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- end
- if nukta[char] then
- c = getnext(c)
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- end
- if halant[char] then
- c = getnext(c)
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- end
- if vowel_modifier[char] then
- c = getnext(c)
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- end
- if stress_tone_mark[char] then
- c = getnext(c)
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- end
- if stress_tone_mark[char] then
- return n
- else
- return c
- end
-end
-
-local function analyze_next_chars_two(c,font)
- local n = getnext(c)
- if not n then
- return c
- end
- if getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
- c = n
- end
- n = c
- while true do
- local nn = getnext(n)
- if nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font then
- local char = getchar(nn)
- if halant[char] then
- n = nn
- local nnn = getnext(nn)
- if nnn and getid(nnn) == glyph_code and zw_char[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
- n = nnn
- end
- elseif char == c_zwnj or char == c_zwj then
- -- n = nn -- not here (?)
- local nnn = getnext(nn)
- if nnn and getid(nnn) == glyph_code and halant[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
- n = nnn
- end
- else
- break
- end
- local nn = getnext(n)
- if nn and getid(nn) == glyph_code and consonant[getchar(nn)] and getsubtype(nn) < 256 and getfont(nn) == font then
- n = nn
- local nnn = getnext(nn)
- if nnn and getid(nnn) == glyph_code and nukta[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
- n = nnn
- end
- c = n
- else
- break
- end
- else
- break
- end
- end
- --
- if not c then
- -- This shouldn't happen I guess.
- return
- end
- local n = getnext(c)
- if not n then
- return c
- end
- local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- local char = getchar(n)
- if char == c_anudatta then
- c = n
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- end
- if halant[char] then
- c = getnext(c)
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- if char == c_zwnj or char == c_zwj then
- c = getnext(c)
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- end
- else
- -- c = ms_matra(c)
- -- same as one
- if dependent_vowel[char] then
- c = getnext(c)
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- end
- if nukta[char] then
- c = getnext(c)
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- end
- if halant[char] then
- c = getnext(c)
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- end
- end
- -- same as one
- if vowel_modifier[char] then
- c = getnext(c)
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- end
- if stress_tone_mark[char] then
- c = getnext(c)
- n = getnext(c)
- if not n then
- return c
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- return c
- end
- char = getchar(n)
- end
- if stress_tone_mark[char] then
- return n
- else
- return c
- end
-end
-
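--- both helpers above return the last glyph node that still belongs to the scanned
--- syllable (or the node they were given when nothing more matches); the deva and
--- dev2 methods below use that node as the syllable end
-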
-local function inject_syntax_error(head,current,mark)
- local signal = copy_node(current)
- copyinjection(signal,current)
- if mark == pre_mark then
- setchar(signal,dotted_circle)
- else
- setchar(current,dotted_circle)
- end
- return insert_node_after(head,current,signal)
-end
-
--- It looks like these two analyzers were written independently but they share
--- a lot. Common code has been synced.
-
-function methods.deva(head,font,attr)
- head = tonut(head)
- local current = head
- local start = true
- local done = false
- local nbspaces = 0
- while current do
- if getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font then
- done = true
- local syllablestart = current
- local syllableend = nil
- local c = current
- local n = getnext(c)
- if n and ra[getchar(c)] and getid(n) == glyph_code and halant[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
- local n = getnext(n)
- if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
- c = n
- end
- end
- local standalone = getchar(c) == c_nbsp
- if standalone then
- local prev = getprev(current)
- if not prev then
- -- begin of paragraph or box
- elseif getid(prev) ~= glyph_code or getsubtype(prev) >= 256 or getfont(prev) ~= font then
- -- different font or language so quite certainly a different word
- elseif not separator[getchar(prev)] then
- -- something that separates words
- else
- standalone = false
- end
- end
- if standalone then
- -- stand alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
- local syllableend = analyze_next_chars_one(c,font,2)
- current = getnext(syllableend)
- if syllablestart ~= syllableend then
- head, current, nbspaces = deva_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
- current = getnext(current)
- end
- else
- -- we can delay the getsubtype(n) and getfont(n) and test for say halant first
- -- as a table access is faster than two function calls (subtype and font are
- -- pseudo fields) but the code becomes messy (unless we make it a function)
- local char = getchar(current)
- if consonant[char] then
- -- syllable containing consonant
- local prevc = true
- while prevc do
- prevc = false
- local n = getnext(current)
- if not n then
- break
- end
- local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- break
- end
- local c = getchar(n)
- if nukta[c] then
- n = getnext(n)
- if not n then
- break
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- break
- end
- c = getchar(n)
- end
- if halant[c] then
- n = getnext(n)
- if not n then
- break
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- break
- end
- c = getchar(n)
- if c == c_zwnj or c == c_zwj then
- n = getnext(n)
- if not n then
- break
- end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if not v then
- break
- end
- c = getchar(n)
- end
- if consonant[c] then
- prevc = true
- current = n
- end
- end
- end
- local n = getnext(current)
- if n and getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
- -- nukta (not specified in the Microsoft Devanagari OpenType specification)
- current = n
- n = getnext(current)
- end
- syllableend = current
- current = n
- if current then
- local v = getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
- if v then
- if halant[getchar(current)] then
- -- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H
- local n = getnext(current)
- if n and getid(n) == glyph_code and zw_char[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
- -- code collapsed, probably needs checking against the original intention
- syllableend = n
- current = getnext(n)
- else
- syllableend = current
- current = n
- end
- else
- -- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM]
- local c = getchar(current)
- if dependent_vowel[c] then
- syllableend = current
- current = getnext(current)
- v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
- if v then
- c = getchar(current)
- end
- end
- if v and vowel_modifier[c] then
- syllableend = current
- current = getnext(current)
- v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
- if v then
- c = getchar(current)
- end
- end
- if v and stress_tone_mark[c] then
- syllableend = current
- current = getnext(current)
- end
- end
- end
- end
- if syllablestart ~= syllableend then
- head, current, nbspaces = deva_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
- current = getnext(current)
- end
- elseif independent_vowel[char] then
- -- syllable without consonants: VO + [VM] + [SM]
- syllableend = current
- current = getnext(current)
- if current then
- local v = getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
- if v then
- local c = getchar(current)
- if vowel_modifier[c] then
- syllableend = current
- current = getnext(current)
- v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
- if v then
- c = getchar(current)
- end
- end
- if v and stress_tone_mark[c] then
- syllableend = current
- current = getnext(current)
- end
- end
- end
- else
- local mark = mark_four[char]
- if mark then
- head, current = inject_syntax_error(head,current,mark)
- end
- current = getnext(current)
- end
- end
- else
- current = getnext(current)
- end
- start = false
- end
-
- if nbspaces > 0 then
- head = replace_all_nbsp(head)
- end
-
- head = tonode(head)
-
- return head, done
-end
-
--- there is a good chance that when we run into one with subtype < 256 the rest is also done
--- so maybe we can omit this check (it's pretty hard to get glyphs in the stream out of the blue)
-
--- handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
-
-function methods.dev2(head,font,attr)
- head = tonut(head)
- local current = head
- local start = true
- local done = false
- local syllabe = 0
- local nbspaces = 0
- while current do
- local syllablestart, syllableend = nil, nil
- if getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font then
- done = true
- syllablestart = current
- local c = current
- local n = getnext(current)
- if n and ra[getchar(c)] and getid(n) == glyph_code and halant[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
- local n = getnext(n)
- if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
- c = n
- end
- end
- local char = getchar(c)
- if independent_vowel[char] then
- -- vowel-based syllable: [Ra+H]+V+[N]+[<[<ZWJ|ZWNJ>]+H+C|ZWJ+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
- current = analyze_next_chars_one(c,font,1)
- syllableend = current
- else
- local standalone = char == c_nbsp
- if standalone then
- nbspaces = nbspaces + 1
- local p = getprev(current)
- if not p then
- -- begin of paragraph or box
- elseif getid(p) ~= glyph_code or getsubtype(p) >= 256 or getfont(p) ~= font then
- -- different font or language so quite certainly a different word
- elseif not separator[getchar(p)] then
- -- something that separates words
- else
- standalone = false
- end
- end
- if standalone then
- -- Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
- current = analyze_next_chars_one(c,font,2)
- syllableend = current
- elseif consonant[getchar(current)] then
- -- WHY current INSTEAD OF c ?
-
- -- Consonant syllable: {C+[N]+<H+[<ZWNJ|ZWJ>]|<ZWNJ|ZWJ>+H>} + C+[N]+[A] + [< H+[<ZWNJ|ZWJ>] | {M}+[N]+[H]>]+[SM]+[(VD)]
- current = analyze_next_chars_two(current,font) -- not c !
- syllableend = current
- end
- end
- end
- if syllableend then
- syllabe = syllabe + 1
- local c = syllablestart
- local n = getnext(syllableend)
- while c ~= n do
- setprop(c,a_syllabe,syllabe)
- c = getnext(c)
- end
- end
- if syllableend and syllablestart ~= syllableend then
- head, current, nbspaces = dev2_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
- end
- if not syllableend and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font and not getprop(current,a_state) then
- local mark = mark_four[getchar(current)]
- if mark then
- head, current = inject_syntax_error(head,current,mark)
- end
- end
- start = false
- current = getnext(current)
- end
-
- if nbspaces > 0 then
- head = replace_all_nbsp(head)
- end
-
- head = tonode(head)
-
- return head, done
-end
-
-methods.mlym = methods.deva
-methods.mlm2 = methods.dev2
diff --git a/tex/context/base/mkiv/font-osd.lua b/tex/context/base/mkiv/font-osd.lua
index ca20f6782..2ee8a2286 100644
--- a/tex/context/base/mkiv/font-osd.lua
+++ b/tex/context/base/mkiv/font-osd.lua
@@ -6,13 +6,6 @@ if not modules then modules = { } end modules ['font-osd'] = { -- script devanag
license = "see context related readme files"
}
--- I'll optimize this one with ischar (much faster) when I see a reason (read: I need a
--- proper test case first).
-
--- This is a version of font-odv.lua adapted to the new font loader and more
--- direct hashing. The initialization code has been adapted (more efficient). One day
--- I'll speed this up ... char swapping and properties.
-
-- A few remarks:
--
-- This code is a partial rewrite of the code that deals with devanagari. The data and logic
@@ -29,7 +22,8 @@ if not modules then modules = { } end modules ['font-osd'] = { -- script devanag
-- much can get messed up in over a week work) it could be that I introduced bugs. There
-- is more to gain (esp in the functions applied to a range) but I'll do that when
-- everything works as expected. Kai's original code is kept in font-odk.lua as a reference
--- so blame me (HH) for bugs.
+-- so blame me (HH) for bugs. (We no longer ship that file as the code below has diverged
+-- too much and in the meantime has more than doubled in size.)
--
-- Interesting is that Kai managed to write this on top of the existing otf handler. Only a
-- few extensions were needed, like a few more analyzing states and dealing with changed
@@ -1140,7 +1134,7 @@ function handlers.devanagari_reorder_matras(head,start) -- no leak
head = remove_node(head,start)
setlink(start,next)
setlink(current,start)
- -- setlink(current,start,next) -- maybe
+ -- setlink(current,start,next) -- maybe
start = startnext
break
end
@@ -1200,7 +1194,7 @@ function handlers.devanagari_reorder_reph(head,start)
head = remove_node(head,start)
setlink(start,next)
setlink(current,start)
- -- setlink(current,start,next) -- maybe
+ -- setlink(current,start,next) -- maybe
start = startnext
startattr = getprop(start,a_syllabe)
break
@@ -1220,7 +1214,7 @@ function handlers.devanagari_reorder_reph(head,start)
head = remove_node(head,start)
setlink(getprev(current),start)
setlink(start,current)
- -- setlink(getprev(current),start,current) -- maybe
+ -- setlink(getprev(current),start,current) -- maybe
start = startnext
startattr = getprop(start,a_syllabe)
break
@@ -1254,7 +1248,7 @@ function handlers.devanagari_reorder_reph(head,start)
head = remove_node(head,start)
setlink(getprev(c),start)
setlink(start,c)
- -- setlink(getprev(c),start,c) -- maybe
+ -- setlink(getprev(c),start,c) -- maybe
-- end
start = startnext
startattr = getprop(start,a_syllabe)
@@ -1278,7 +1272,7 @@ function handlers.devanagari_reorder_reph(head,start)
head = remove_node(head,start)
setlink(start,getnext(current))
setlink(current,start)
- -- setlink(current,start,getnext(current)) -- maybe
+ -- setlink(current,start,getnext(current)) -- maybe
start = startnext
end
end
@@ -1322,7 +1316,7 @@ function handlers.devanagari_reorder_pre_base_reordering_consonants(head,start)
removenode(start,start)
setlink(start,next)
setlink(current,start)
- -- setlink(current,start,next) -- maybe
+ -- setlink(current,start,next) -- maybe
start = startnext
break
end
diff --git a/tex/context/base/mkiv/font-ota.lua b/tex/context/base/mkiv/font-ota.lua
index 232c2586a..743951d50 100644
--- a/tex/context/base/mkiv/font-ota.lua
+++ b/tex/context/base/mkiv/font-ota.lua
@@ -1,6 +1,6 @@
if not modules then modules = { } end modules ['font-ota'] = {
version = 1.001,
- comment = "companion to font-otf.lua (analysing)",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
diff --git a/tex/context/base/mkiv/font-otb.lua b/tex/context/base/mkiv/font-otb.lua
deleted file mode 100644
index a31079225..000000000
--- a/tex/context/base/mkiv/font-otb.lua
+++ /dev/null
@@ -1,705 +0,0 @@
-if not modules then modules = { } end modules ['font-otb'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-local concat = table.concat
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring, rawget = type, next, tonumber, tostring, rawget
-
-local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
-local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_ligatures_detail = false trackers.register("otf.ligatures.detail", function(v) trace_ligatures_detail = v end)
-local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
-local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
-
-local report_prepare = logs.reporter("fonts","otf prepare")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local otffeatures = otf.features
-local registerotffeature = otffeatures.register
-
-otf.defaultbasealternate = "none" -- first last
-
-local wildcard = "*"
-local default = "dflt"
-
-local formatters = string.formatters
-local f_unicode = formatters["%U"]
-local f_uniname = formatters["%U (%s)"]
-local f_unilist = formatters["% t (% t)"]
-
-local function gref(descriptions,n)
- if type(n) == "number" then
- local name = descriptions[n].name
- if name then
- return f_uniname(n,name)
- else
- return f_unicode(n)
- end
- elseif n then
- local num, nam, j = { }, { }, 0
- for i=1,#n do
- local ni = n[i]
- if tonumber(ni) then -- first is likely a key
- j = j + 1
- local di = descriptions[ni]
- num[j] = f_unicode(ni)
- nam[j] = di and di.name or "-"
- end
- end
- return f_unilist(num,nam)
- else
- return "<error in base mode tracing>"
- end
-end
-
-local function cref(feature,lookuptags,lookupname)
- if lookupname then
- return formatters["feature %a, lookup %a"](feature,lookuptags[lookupname])
- else
- return formatters["feature %a"](feature)
- end
-end
-
-local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment)
- report_prepare("%s: base alternate %s => %s (%S => %S)",
- cref(feature,lookuptags,lookupname),
- gref(descriptions,unicode),
- replacement and gref(descriptions,replacement),
- value,
- comment)
-end
-
-local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution)
- report_prepare("%s: base substitution %s => %S",
- cref(feature,lookuptags,lookupname),
- gref(descriptions,unicode),
- gref(descriptions,substitution))
-end
-
-local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature)
- report_prepare("%s: base ligature %s => %S",
- cref(feature,lookuptags,lookupname),
- gref(descriptions,ligature),
- gref(descriptions,unicode))
-end
-
-local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value)
- report_prepare("%s: base kern %s + %s => %S",
- cref(feature,lookuptags,lookupname),
- gref(descriptions,unicode),
- gref(descriptions,otherunicode),
- value)
-end
-
-local basemethods = { }
-local basemethod = "<unset>"
-
-local function applybasemethod(what,...)
- local m = basemethods[basemethod][what]
- if m then
- return m(...)
- end
-end
-
--- We need to make sure that luatex sees the difference between
--- base fonts that have different glyphs in the same slots in fonts
--- that have the same fullname (or filename). LuaTeX will merge fonts
--- eventually (and subset later on). If needed we can use a more
--- verbose name as long as we don't use <()<>[]{}/%> and the length
--- is < 128.
-
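--- a rough sketch of the effect: with, say, liga and kern enabled the applied list
--- reads { "liga=true", "kern=true" }, the concatenated string gets hash slot 1 and
--- the fullname becomes fullname .. "-1"; the next distinct feature set gets "-2"
-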
-local basehash, basehashes, applied = { }, 1, { }
-
-local function registerbasehash(tfmdata)
- local properties = tfmdata.properties
- local hash = concat(applied," ")
- local base = basehash[hash]
- if not base then
- basehashes = basehashes + 1
- base = basehashes
- basehash[hash] = base
- end
- properties.basehash = base
- properties.fullname = properties.fullname .. "-" .. base
- -- report_prepare("fullname base hash '%a, featureset %a",tfmdata.properties.fullname,hash)
- applied = { }
-end
-
-local function registerbasefeature(feature,value)
- applied[#applied+1] = feature .. "=" .. tostring(value)
-end
-
--- The original basemode ligature builder used the names of components
--- and did some expression juggling to get the chain right. The current
--- variant starts with unicodes but still uses names to make the chain.
--- This is needed because we have to create intermediates when needed
--- but use predefined snippets when available. To some extent the
--- current builder is more stupid but I don't worry that much about it
--- as ligatures are rather predictable.
---
--- Personally I think that an ff + i == ffi rule as used in for instance
--- latin modern is pretty weird as no sane person will key that in and
--- expect a glyph for that ligature plus the following character. Anyhow,
--- as we need to deal with this, we do, but no guarantees are given.
---
--- latin modern dejavu
---
--- f+f 102 102 102 102
--- f+i 102 105 102 105
--- f+l 102 108 102 108
--- f+f+i 102 102 105
--- f+f+l 102 102 108 102 102 108
--- ff+i 64256 105 64256 105
--- ff+l 64256 108
---
--- As you can see here, latin modern is less complete than dejavu but
--- in practice one will not notice it.
---
--- The while loop is needed because we need to resolve for instance
--- pseudo names like hyphen_hyphen to endash so in practice we end
--- up with a bit too many definitions but the overhead is negligible.
---
--- We can have changed[first] or changed[second] but it quickly becomes
--- messy if we need to take that into account.
-
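--- a sketch of what the builder below ends up storing for an f + f + i rule
--- (102 102 105), assuming the font also has an f + f rule that maps to the
--- ff glyph (64256):
---
--- characters[102].ligatures[102] = { char = 64256 } -- f followed by f gives ff
--- characters[64256].ligatures[105] = { char = 64259 } -- ff followed by i gives ffi
---
--- when an intermediate combination has no glyph of its own a private slot is
--- created (named after the ctx_<code>_<code> chain) and used as the chaining
--- point instead
-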
-local trace = false
-
-local function finalize_ligatures(tfmdata,ligatures)
- local nofligatures = #ligatures
- if nofligatures > 0 then
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local unicodes = resources.unicodes -- we use rawget in order to avoid building the table
- local private = resources.private
- local alldone = false
- while not alldone do
- local done = 0
- for i=1,nofligatures do
- local ligature = ligatures[i]
- if ligature then
- local unicode, lookupdata = ligature[1], ligature[2]
- if trace_ligatures_detail then
- report_prepare("building % a into %a",lookupdata,unicode)
- end
- local size = #lookupdata
- local firstcode = lookupdata[1] -- [2]
- local firstdata = characters[firstcode]
- local okay = false
- if firstdata then
- local firstname = "ctx_" .. firstcode
- for i=1,size-1 do -- for i=2,size-1 do
- local firstdata = characters[firstcode]
- if not firstdata then
- firstcode = private
- if trace_ligatures_detail then
- report_prepare("defining %a as %a",firstname,firstcode)
- end
- unicodes[firstname] = firstcode
- firstdata = { intermediate = true, ligatures = { } }
- characters[firstcode] = firstdata
- descriptions[firstcode] = { name = firstname }
- private = private + 1
- end
- local target
- local secondcode = lookupdata[i+1]
- local secondname = firstname .. "_" .. secondcode
- if i == size - 1 then
- target = unicode
- if not rawget(unicodes,secondname) then
- unicodes[secondname] = unicode -- map final ligature onto intermediates
- end
- okay = true
- else
- target = rawget(unicodes,secondname)
- if not target then
- break
- end
- end
- if trace_ligatures_detail then
- report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
- end
- local firstligs = firstdata.ligatures
- if firstligs then
- firstligs[secondcode] = { char = target }
- else
- firstdata.ligatures = { [secondcode] = { char = target } }
- end
- firstcode = target
- firstname = secondname
- end
- elseif trace_ligatures_detail then
- report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target)
- end
- if okay then
- ligatures[i] = false
- done = done + 1
- end
- end
- end
- alldone = done == 0
- end
- if trace_ligatures_detail then
- for k, v in table.sortedhash(characters) do
- if v.ligatures then
- table.print(v,k)
- end
- end
- end
- resources.private = private
- return true
- end
-end
-
-local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local properties = tfmdata.properties
- local changed = tfmdata.changed
- local lookuphash = resources.lookuphash
- local lookuptypes = resources.lookuptypes
- local lookuptags = resources.lookuptags
-
- local ligatures = { }
- local alternate = tonumber(value) or true and 1
- local defaultalt = otf.defaultbasealternate
-
- local trace_singles = trace_baseinit and trace_singles
- local trace_alternatives = trace_baseinit and trace_alternatives
- local trace_ligatures = trace_baseinit and trace_ligatures
-
- local actions = {
- substitution = function(lookupdata,lookuptags,lookupname,description,unicode)
- if trace_singles then
- report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
- end
- changed[unicode] = lookupdata
- end,
- alternate = function(lookupdata,lookuptags,lookupname,description,unicode)
- local replacement = lookupdata[alternate]
- if replacement then
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = lookupdata[1]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
- replacement = lookupdata[#lookupdata]
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- end,
- ligature = function(lookupdata,lookuptags,lookupname,description,unicode)
- if trace_ligatures then
- report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
- end
- ligatures[#ligatures+1] = { unicode, lookupdata }
- end,
- }
-
- for unicode, character in next, characters do
- local description = descriptions[unicode]
- local lookups = description.slookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookups[lookupname]
- if lookupdata then
- local lookuptype = lookuptypes[lookupname]
- local action = actions[lookuptype]
- if action then
- action(lookupdata,lookuptags,lookupname,description,unicode)
- end
- end
- end
- end
- local lookups = description.mlookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookuplist = lookups[lookupname]
- if lookuplist then
- local lookuptype = lookuptypes[lookupname]
- local action = actions[lookuptype]
- if action then
- for i=1,#lookuplist do
- action(lookuplist[i],lookuptags,lookupname,description,unicode)
- end
- end
- end
- end
- end
- end
- properties.hasligatures = finalize_ligatures(tfmdata,ligatures)
-end
-
-local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local properties = tfmdata.properties
- local lookuptags = resources.lookuptags
- local sharedkerns = { }
- local traceindeed = trace_baseinit and trace_kerns
- local haskerns = false
- for unicode, character in next, characters do
- local description = descriptions[unicode]
- local rawkerns = description.kerns -- shared
- if rawkerns then
- local s = sharedkerns[rawkerns]
- if s == false then
- -- skip
- elseif s then
- character.kerns = s
- else
- local newkerns = character.kerns
- local done = false
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local kerns = rawkerns[lookup]
- if kerns then
- for otherunicode, value in next, kerns do
- if value == 0 then
- -- maybe no 0 test here
- elseif not newkerns then
- newkerns = { [otherunicode] = value }
- done = true
- if traceindeed then
- report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
- end
- elseif not newkerns[otherunicode] then -- first wins
- newkerns[otherunicode] = value
- done = true
- if traceindeed then
- report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
- end
- end
- end
- end
- end
- if done then
- sharedkerns[rawkerns] = newkerns
- character.kerns = newkerns -- no empty assignments
- haskerns = true
- else
- sharedkerns[rawkerns] = false
- end
- end
- end
- end
- properties.haskerns = haskerns
-end
-
-basemethods.independent = {
- preparesubstitutions = preparesubstitutions,
- preparepositionings = preparepositionings,
-}
-
-local function makefake(tfmdata,name,present)
- local resources = tfmdata.resources
- local private = resources.private
- local character = { intermediate = true, ligatures = { } }
- resources.unicodes[name] = private
- tfmdata.characters[private] = character
- tfmdata.descriptions[private] = { name = name }
- resources.private = private + 1
- present[name] = private
- return character
-end
-
-local function make_1(present,tree,name)
- for k, v in next, tree do
- if k == "ligature" then
- present[name] = v
- else
- make_1(present,v,name .. "_" .. k)
- end
- end
-end
-
-local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname)
- for k, v in next, tree do
- if k == "ligature" then
- local character = characters[preceding]
- if not character then
- if trace_baseinit then
- report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding)
- end
- character = makefake(tfmdata,name,present)
- end
- local ligatures = character.ligatures
- if ligatures then
- ligatures[unicode] = { char = v }
- else
- character.ligatures = { [unicode] = { char = v } }
- end
- if done then
- local d = done[lookupname]
- if not d then
- done[lookupname] = { "dummy", v }
- else
- d[#d+1] = v
- end
- end
- else
- local code = present[name] or unicode
- local name = name .. "_" .. k
- make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname)
- end
- end
-end
-
-local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local changed = tfmdata.changed
- local lookuphash = resources.lookuphash
- local lookuptypes = resources.lookuptypes
- local lookuptags = resources.lookuptags
-
- local ligatures = { }
- local alternate = tonumber(value) or true and 1
- local defaultalt = otf.defaultbasealternate
-
- local trace_singles = trace_baseinit and trace_singles
- local trace_alternatives = trace_baseinit and trace_alternatives
- local trace_ligatures = trace_baseinit and trace_ligatures
-
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookuphash[lookupname]
- local lookuptype = lookuptypes[lookupname]
- for unicode, data in next, lookupdata do
- if lookuptype == "substitution" then
- if trace_singles then
- report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data)
- end
- changed[unicode] = data
- elseif lookuptype == "alternate" then
- local replacement = data[alternate]
- if replacement then
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = data[1]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
- replacement = data[#data]
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- elseif lookuptype == "ligature" then
- ligatures[#ligatures+1] = { unicode, data, lookupname }
- if trace_ligatures then
- report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data)
- end
- end
- end
- end
-
- local nofligatures = #ligatures
-
- if nofligatures > 0 then
-
- local characters = tfmdata.characters
- local present = { }
- local done = trace_baseinit and trace_ligatures and { }
-
- for i=1,nofligatures do
- local ligature = ligatures[i]
- local unicode, tree = ligature[1], ligature[2]
- make_1(present,tree,"ctx_"..unicode)
- end
-
- for i=1,nofligatures do
- local ligature = ligatures[i]
- local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3]
- make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname)
- end
-
- end
-
-end
-
-local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local properties = tfmdata.properties
- local lookuphash = resources.lookuphash
- local lookuptags = resources.lookuptags
- local traceindeed = trace_baseinit and trace_kerns
- -- check out this sharedkerns trickery
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookuphash[lookupname]
- for unicode, data in next, lookupdata do
- local character = characters[unicode]
- local kerns = character.kerns
- if not kerns then
- kerns = { }
- character.kerns = kerns
- end
- if traceindeed then
- for otherunicode, kern in next, data do
- if not kerns[otherunicode] and kern ~= 0 then
- kerns[otherunicode] = kern
- report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,kern)
- end
- end
- else
- for otherunicode, kern in next, data do
- if not kerns[otherunicode] and kern ~= 0 then
- kerns[otherunicode] = kern
- end
- end
- end
- end
- end
-
-end
-
-local function initializehashes(tfmdata)
- nodeinitializers.features(tfmdata)
-end
-
-basemethods.shared = {
- initializehashes = initializehashes,
- preparesubstitutions = preparesubstitutions,
- preparepositionings = preparepositionings,
-}
-
-basemethod = "independent"
-
-local function featuresinitializer(tfmdata,value)
- if true then -- value then
- local starttime = trace_preparing and os.clock()
- local features = tfmdata.shared.features
- local fullname = tfmdata.properties.fullname or "?"
- if features then
- applybasemethod("initializehashes",tfmdata)
- local collectlookups = otf.collectlookups
- local rawdata = tfmdata.shared.rawdata
- local properties = tfmdata.properties
- local script = properties.script -- or "dflt" -- can be nil
- local language = properties.language -- or "dflt" -- can be nil
- local basesubstitutions = rawdata.resources.features.gsub
- local basepositionings = rawdata.resources.features.gpos
- --
- -- if basesubstitutions then
- -- for feature, data in next, basesubstitutions do
- -- local value = features[feature]
- -- if value then
- -- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- -- if validlookups then
- -- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- -- registerbasefeature(feature,value)
- -- end
- -- end
- -- end
- -- end
- -- if basepositionings then
- -- for feature, data in next, basepositionings do
- -- local value = features[feature]
- -- if value then
- -- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- -- if validlookups then
- -- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
- -- registerbasefeature(feature,value)
- -- end
- -- end
- -- end
- -- end
- --
- if basesubstitutions or basepositionings then
- local sequences = tfmdata.resources.sequences
- for s=1,#sequences do
- local sequence = sequences[s]
- local sfeatures = sequence.features
- if sfeatures then
- local order = sequence.order
- if order then
- for i=1,#order do --
- local feature = order[i]
- local value = features[feature]
- if value then
- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- if not validlookups then
- -- skip
- elseif basesubstitutions and basesubstitutions[feature] then
- if trace_preparing then
- report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value)
- end
- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- elseif basepositionings and basepositionings[feature] then
- if trace_preparing then
- report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value)
- end
- applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- end
- end
- end
- --
- registerbasehash(tfmdata)
- end
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname)
- end
- end
-end
-
-registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
- -- position = 1, -- after setscript (temp hack ... we need to force script / language to 1)
- base = featuresinitializer,
- }
-}
-
--- independent : collect lookups independently (takes more runtime ... negligible)
--- shared : shares lookups with node mode (takes more memory unless also a node mode variant is used ... noticeable)
-
-directives.register("fonts.otf.loader.basemethod", function(v)
- if basemethods[v] then
- basemethod = v
- end
-end)
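-
--- a minimal usage sketch (hypothetical invocation, not part of this module): the base
--- method registered above can be switched at runtime with the directive, for instance
---
--- \enabledirectives[fonts.otf.loader.basemethod=shared]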
diff --git a/tex/context/base/mkiv/font-otc.lua b/tex/context/base/mkiv/font-otc.lua
index 034cba613..0af588b74 100644
--- a/tex/context/base/mkiv/font-otc.lua
+++ b/tex/context/base/mkiv/font-otc.lua
@@ -1,6 +1,6 @@
if not modules then modules = { } end modules ['font-otc'] = {
version = 1.001,
- comment = "companion to font-otf.lua (context)",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
diff --git a/tex/context/base/mkiv/font-otf.lua b/tex/context/base/mkiv/font-otf.lua
deleted file mode 100644
index 1db80272e..000000000
--- a/tex/context/base/mkiv/font-otf.lua
+++ /dev/null
@@ -1,2968 +0,0 @@
-if not modules then modules = { } end modules ['font-otf'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- langs -> languages etc
--- anchor_classes vs kernclasses
--- modification/creationtime in subfont is runtime so pointless
--- to_table -> totable
--- ascent descent
-
--- to be checked: combinations like:
---
--- current="ABCD" with [A]=nothing, [BC]=ligature, [D]=single (applied to result of BC so funny index)
---
--- unlikely but possible
-
--- more checking against low level calls of functions
-
-local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local abs = math.abs
-local reversed, concat, insert, remove, sortedkeys = table.reversed, table.concat, table.insert, table.remove, table.sortedkeys
-local fastcopy, tohash, derivetable, copy = table.fastcopy, table.tohash, table.derive, table.copy
-local formatters = string.formatters
-local P, R, S, C, Ct, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.match
-
-local setmetatableindex = table.setmetatableindex
-local allocate = utilities.storage.allocate
-local registertracker = trackers.register
-local registerdirective = directives.register
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local elapsedtime = statistics.elapsedtime
-local findbinfile = resolvers.findbinfile
-
-local trace_private = false registertracker("otf.private", function(v) trace_private = v end)
-local trace_subfonts = false registertracker("otf.subfonts", function(v) trace_subfonts = v end)
-local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end)
-local trace_features = false registertracker("otf.features", function(v) trace_features = v end)
-local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end)
-local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end)
-local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end)
-local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end)
-
-local compact_lookups = true registertracker("otf.compactlookups", function(v) compact_lookups = v end)
-local purge_names = true registertracker("otf.purgenames", function(v) purge_names = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-otf.glists = { "gsub", "gpos" }
-
-otf.version = 2.826 -- beware: also sync font-mis.lua and in mtx-fonts
-otf.cache = containers.define("fonts", "otf", otf.version, true)
-
-local hashes = fonts.hashes
-local definers = fonts.definers
-local readers = fonts.readers
-local constructors = fonts.constructors
-
-local fontdata = hashes and hashes.identifiers
-local chardata = characters and characters.data -- not used
-
-local otffeatures = constructors.features.otf
-local registerotffeature = otffeatures.register
-
-local otfenhancers = constructors.enhancers.otf
-local registerotfenhancer = otfenhancers.register
-
-local forceload = false
-local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M)
-local packdata = true
-local syncspace = true
-local forcenotdef = false
-local includesubfonts = false
-local overloadkerns = false -- experiment
-
-local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes
-
-local wildcard = "*"
-local default = "dflt"
-
-local fontloader = fontloader
-local open_font = fontloader.open
-local close_font = fontloader.close
-local font_fields = fontloader.fields
-local apply_featurefile = fontloader.apply_featurefile
-
-local mainfields = nil
-local glyphfields = nil -- not used yet
-
-local formats = fonts.formats
-
-formats.otf = "opentype"
-formats.ttf = "truetype"
-formats.ttc = "truetype"
-formats.dfont = "truetype"
-
-registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end)
-registerdirective("fonts.otf.loader.force", function(v) forceload = v end)
-registerdirective("fonts.otf.loader.pack", function(v) packdata = v end)
-registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end)
-registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end)
-registerdirective("fonts.otf.loader.overloadkerns", function(v) overloadkerns = v end)
------------------("fonts.otf.loader.alldimensions", function(v) alldimensions = v end)
-
-function otf.fileformat(filename)
- local leader = lower(io.loadchunk(filename,4))
- local suffix = lower(file.suffix(filename))
- if leader == "otto" then
- return formats.otf, suffix == "otf"
- elseif leader == "ttcf" then
- return formats.ttc, suffix == "ttc"
- -- elseif leader == "true" then
- -- return formats.ttf, suffix == "ttf"
- elseif suffix == "ttc" then
- return formats.ttc, true
- elseif suffix == "dfont" then
- return formats.dfont, true
- else
- return formats.ttf, suffix == "ttf"
- end
-end
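-
--- an illustrative call (hypothetical filename, not in the original): a cff based font
--- that was misnamed with a ttf suffix still reports as opentype, and the second value
--- flags the mismatch
---
--- local format, okay = otf.fileformat("lm/misnamed.ttf") -- leader "OTTO", suffix "ttf"
--- -- format == "opentype" (formats.otf), okay == false, so otf_format below warns about it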
-
--- local function otf_format(filename)
--- -- return formats[lower(file.suffix(filename))]
--- end
-
-local function otf_format(filename)
- local format, okay = otf.fileformat(filename)
- if not okay then
- report_otf("font %a is actually an %a file",filename,format)
- end
- return format
-end
-
-local function load_featurefile(raw,featurefile)
- if featurefile and featurefile ~= "" then
- if trace_loading then
- report_otf("using featurefile %a", featurefile)
- end
- apply_featurefile(raw, featurefile)
- end
-end
-
-local function showfeatureorder(rawdata,filename)
- local sequences = rawdata.resources.sequences
- if sequences and #sequences > 0 then
- if trace_loading then
- report_otf("font %a has %s sequences",filename,#sequences)
- report_otf(" ")
- end
- for nos=1,#sequences do
- local sequence = sequences[nos]
- local typ = sequence.type or "no-type"
- local name = sequence.name or "no-name"
- local subtables = sequence.subtables or { "no-subtables" }
- local features = sequence.features
- if trace_loading then
- report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
- end
- if features then
- for feature, scripts in next, features do
- local tt = { }
- if type(scripts) == "table" then
- for script, languages in next, scripts do
- local ttt = { }
- for language, _ in next, languages do
- ttt[#ttt+1] = language
- end
- tt[#tt+1] = formatters["[%s: % t]"](script,ttt)
- end
- if trace_loading then
- report_otf(" %s: % t",feature,tt)
- end
- else
- if trace_loading then
- report_otf(" %s: %S",feature,scripts)
- end
- end
- end
- end
- end
- if trace_loading then
- report_otf("\n")
- end
- elseif trace_loading then
- report_otf("font %a has no sequences",filename)
- end
-end
-
---[[ldx--
-<p>We start with a lot of tables and related functions.</p>
---ldx]]--
-
-local valid_fields = table.tohash {
- -- "anchor_classes",
- "ascent",
- -- "cache_version",
- "cidinfo",
- "copyright",
- -- "creationtime",
- "descent",
- "design_range_bottom",
- "design_range_top",
- "design_size",
- "encodingchanged",
- "extrema_bound",
- "familyname",
- "fontname",
- "fontstyle_id",
- "fontstyle_name",
- "fullname",
- -- "glyphs",
- "hasvmetrics",
- -- "head_optimized_for_cleartype",
- "horiz_base",
- "issans",
- "isserif",
- "italicangle",
- -- "kerns",
- -- "lookups",
- "macstyle",
- -- "modificationtime",
- "notdef_loc",
- "onlybitmaps",
- "origname",
- "os2_version",
- "pfminfo",
- -- "private",
- "serifcheck",
- "sfd_version",
- -- "size",
- "strokedfont",
- "strokewidth",
- -- "subfonts",
- "table_version",
- -- "tables",
- -- "ttf_tab_saved",
- "ttf_tables",
- "uni_interp",
- "uniqueid",
- "units_per_em",
- "upos",
- "use_typo_metrics",
- "uwidth",
- "validation_state",
- "version",
- "vert_base",
- "weight",
- "weight_width_slope_only",
- -- "xuid",
- -- "truetype", -- maybe as check
-}
-
-local function adddimensions(data,filename)
- -- todo: forget about the width if it's the defaultwidth (saves mem)
- -- we could also build the marks hash here (instead of storing it)
- if data then
- local descriptions = data.descriptions
- local resources = data.resources
- local defaultwidth = resources.defaultwidth or 0
- local defaultheight = resources.defaultheight or 0
- local defaultdepth = resources.defaultdepth or 0
- local basename = trace_markwidth and file.basename(filename)
- for _, d in next, descriptions do
- local bb, wd = d.boundingbox, d.width
- if not wd then
- -- or bb?
- d.width = defaultwidth
- elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
- -- d.width = -wd
- end
- if bb then
- local ht = bb[4]
- local dp = -bb[2]
- -- if alldimensions then
- -- if ht ~= 0 then
- -- d.height = ht
- -- end
- -- if dp ~= 0 then
- -- d.depth = dp
- -- end
- -- else
- if ht == 0 or ht < 0 then
- -- not set
- else
- d.height = ht
- end
- if dp == 0 or dp < 0 then
- -- not set
- else
- d.depth = dp
- end
- -- end
- end
- end
- end
-end
-
-function otf.load(filename,sub,featurefile) -- second argument (format) is gone !
- local base = file.basename(file.removesuffix(filename))
- local name = file.removesuffix(base)
- local attr = lfs.attributes(filename)
- local size = attr and attr.size or 0
- local time = attr and attr.modification or 0
- if featurefile then
- name = name .. "@" .. file.removesuffix(file.basename(featurefile))
- end
- -- or: sub = tonumber(sub)
- if sub == "" then
- sub = false
- end
- local hash = name
- if sub then
- hash = hash .. "-" .. sub
- end
- hash = containers.cleanname(hash)
- local featurefiles
- if featurefile then
- featurefiles = { }
- for s in gmatch(featurefile,"[^,]+") do
- local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
- if name == "" then
- report_otf("loading error, no featurefile %a",s)
- else
- local attr = lfs.attributes(name)
- featurefiles[#featurefiles+1] = {
- name = name,
- size = attr and attr.size or 0,
- time = attr and attr.modification or 0,
- }
- end
- end
- if #featurefiles == 0 then
- featurefiles = nil
- end
- end
- local data = containers.read(otf.cache,hash)
- local reload = not data or data.size ~= size or data.time ~= time
- if forceload then
- report_otf("forced reload of %a due to hard coded flag",filename)
- reload = true
- end
- if not reload then
- local featuredata = data.featuredata
- if featurefiles then
- if not featuredata or #featuredata ~= #featurefiles then
- reload = true
- else
- for i=1,#featurefiles do
- local fi, fd = featurefiles[i], featuredata[i]
- if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then
- reload = true
- break
- end
- end
- end
- elseif featuredata then
- reload = true
- end
- if reload then
- report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
- end
- end
- if reload then
- starttiming("fontloader")
- report_otf("loading %a, hash %a",filename,hash)
- local fontdata, messages
- if sub then
- fontdata, messages = open_font(filename,sub)
- else
- fontdata, messages = open_font(filename)
- end
- if fontdata then
- mainfields = mainfields or (font_fields and font_fields(fontdata))
- end
- if trace_loading and messages and #messages > 0 then
- if type(messages) == "string" then
- report_otf("warning: %s",messages)
- else
- for m=1,#messages do
- report_otf("warning: %S",messages[m])
- end
- end
- else
- report_otf("loading done")
- end
- if fontdata then
- if featurefiles then
- for i=1,#featurefiles do
- load_featurefile(fontdata,featurefiles[i].name)
- end
- end
- local unicodes = {
- -- names to unicodes
- }
- local splitter = lpeg.splitter(" ",unicodes)
- data = {
- size = size,
- time = time,
- subfont = sub,
- format = otf_format(filename),
- featuredata = featurefiles,
- resources = {
- filename = resolvers.unresolve(filename), -- no shortcut
- version = otf.version,
- creator = "context mkiv",
- unicodes = unicodes,
- indices = {
- -- index to unicodes
- },
- duplicates = {
- -- alternative unicodes
- },
- variants = {
- -- alternative unicodes (variants)
- },
- lookuptypes = {
- },
- },
- warnings = {
- },
- metadata = {
- -- raw metadata, not to be used
- },
- properties = {
- -- normalized metadata
- },
- descriptions = {
- },
- goodies = {
- },
- helpers = { -- might go away
- tounicodelist = splitter,
- tounicodetable = Ct(splitter),
- },
- }
- report_otf("file size: %s", size)
- otfenhancers.apply(data,filename,fontdata)
- local packtime = { }
- if packdata then
- if cleanup > 0 then
- collectgarbage("collect")
- end
- starttiming(packtime)
- otf.packdata(data,filename,nil) -- implemented elsewhere
- stoptiming(packtime)
- end
- report_otf("saving %a in cache",filename)
- data = containers.write(otf.cache, hash, data)
- if cleanup > 1 then
- collectgarbage("collect")
- end
- stoptiming("fontloader")
- if elapsedtime then
- report_otf("loading, optimizing, packing and caching time %s, pack time %s",
- elapsedtime("fontloader"),packdata and elapsedtime(packtime) or 0)
- end
- close_font(fontdata) -- free memory
- if cleanup > 3 then
- collectgarbage("collect")
- end
- data = containers.read(otf.cache, hash) -- this frees the old table and loads the sparse one
- if cleanup > 2 then
- collectgarbage("collect")
- end
- else
- stoptiming("fontloader")
- data = nil
- report_otf("loading failed due to read error")
- end
- end
- if data then
- if trace_defining then
- report_otf("loading from cache using hash %a",hash)
- end
- otf.unpackdata(data,filename,nil,false) -- implemented elsewhere
- --
- local resources = data.resources
- local lookuptags = resources.lookuptags
- local unicodes = resources.unicodes
- if not lookuptags then
- lookuptags = { }
- resources.lookuptags = lookuptags
- end
- setmetatableindex(lookuptags,function(t,k)
- local v = type(k) == "number" and ("lookup " .. k) or k
- t[k] = v
- return v
- end)
- if not unicodes then
- unicodes = { }
- resources.unicodes = unicodes
- setmetatableindex(unicodes,function(t,k)
- -- use rawget when no table has to be built
- setmetatableindex(unicodes,nil)
- for u, d in next, data.descriptions do
- local n = d.name
- if n then
- t[n] = u
- -- report_otf("accessing known name %a",k)
- else
- -- report_otf("accessing unknown name %a",k)
- end
- end
- return rawget(t,k)
- end)
- end
- constructors.addcoreunicodes(unicodes) -- do we really need this?
- --
- if applyruntimefixes then
- applyruntimefixes(filename,data)
- end
- adddimensions(data,filename,nil,false)
- if trace_sequences then
- showfeatureorder(data,filename)
- end
- end
- return data
-end
-
-local mt = {
- __index = function(t,k) -- maybe set it
- if k == "height" then
- local ht = t.boundingbox[4]
- return ht < 0 and 0 or ht
- elseif k == "depth" then
- local dp = -t.boundingbox[2]
- return dp < 0 and 0 or dp
- elseif k == "width" then
- return 0
- elseif k == "name" then -- or maybe uni*
- return forcenotdef and ".notdef"
- end
- end
-}
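-
--- an illustrative sketch (not used by the loader itself): a description that only carries
--- a boundingbox { llx, lly, urx, ury } derives its height and depth on access
---
--- local d = setmetatable({ boundingbox = { 10, -200, 550, 700 } }, mt)
--- print(d.height, d.depth, d.width) -- 700  200  0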
-
-local function enhance_prepare_tables(data,filename,raw)
- data.properties.hasitalics = false
-end
-
-local function somecopy(old) -- fast one
- if old then
- local new = { }
- if type(old) == "table" then
- for k, v in next, old do
- if k == "glyphs" then
- -- skip
- elseif type(v) == "table" then
- new[k] = somecopy(v)
- else
- new[k] = v
- end
- end
- else
- for i=1,#mainfields do
- local k = mainfields[i]
- local v = old[k]
- if k == "glyphs" then
- -- skip
- elseif type(v) == "table" then
- new[k] = somecopy(v)
- else
- new[k] = v
- end
- end
- end
- return new
- else
- return { }
- end
-end
-
--- not setting hasitalics and class (when nil) during table construction can save some mem
-
-local function enhance_prepare_glyphs(data,filename,raw)
- local rawglyphs = raw.glyphs
- local rawsubfonts = raw.subfonts
- local rawcidinfo = raw.cidinfo
- local criterium = constructors.privateoffset
- local private = criterium
- local resources = data.resources
- local metadata = data.metadata
- local properties = data.properties
- local descriptions = data.descriptions
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicode
- local duplicates = resources.duplicates
- local variants = resources.variants
- local notdeffound = -1
-
- if rawsubfonts then
-
- metadata.subfonts = includesubfonts and { }
- properties.cidinfo = rawcidinfo
-
- if rawcidinfo.registry then
- local cidmap = fonts.cid.getmap(rawcidinfo)
- if cidmap then
- rawcidinfo.usedname = cidmap.usedname
- local nofnames = 0
- local nofunicodes = 0
- local cidunicodes = cidmap.unicodes
- local cidnames = cidmap.names
- local cidtotal = 0
- local unique = trace_subfonts and { }
- for cidindex=1,#rawsubfonts do
- local subfont = rawsubfonts[cidindex]
- local cidglyphs = subfont.glyphs
- if includesubfonts then
- metadata.subfonts[cidindex] = somecopy(subfont)
- end
- local cidcnt = subfont.glyphcnt
- local cidmin = subfont.glyphmin
- local cidmax = subfont.glyphmax
- local notdef = (tonumber(raw.table_version) or 0) > 0.4 and subfont.notdef_loc or -1
- if notdeffound == -1 and notdef >= 0 then
- notdeffound = notdef
- end
- if trace_subfonts then
- local cidtot = cidmax - cidmin + 1
- cidtotal = cidtotal + cidtot
- report_otf("subfont: %i, min: %i, max: %i, cnt: %i, n: %i",cidindex,cidmin,cidmax,cidtot,cidcnt)
- end
- if cidcnt > 0 then
- for index=cidmin,cidmax do
- local glyph = cidglyphs[index]
- if glyph then
- if trace_subfonts then
- unique[index] = true
- end
- local unicode = glyph.unicode
- if unicode >= 0x00E000 and unicode <= 0x00F8FF then
- unicode = -1
- elseif unicode >= 0x0F0000 and unicode <= 0x0FFFFD then
- unicode = -1
- elseif unicode >= 0x100000 and unicode <= 0x10FFFD then
- unicode = -1
- end
- local name = glyph.name or cidnames[index]
- if not unicode or unicode == -1 then -- or unicode >= criterium then
- unicode = cidunicodes[index]
- end
- if unicode and descriptions[unicode] then
- if trace_private then
- report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
- end
- unicode = -1
- end
- if not unicode or unicode == -1 then -- or unicode >= criterium then
- if not name then
- name = formatters["u%06X.ctx"](private)
- end
- unicode = private
- unicodes[name] = private
- if trace_private then
- report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
- end
- private = private + 1
- nofnames = nofnames + 1
- else
- -- if unicode > criterium then
- -- local taken = descriptions[unicode]
- -- if taken then
- -- private = private + 1
- -- descriptions[private] = taken
- -- unicodes[taken.name] = private
- -- indices[taken.index] = private
- -- if trace_private then
- -- report_otf("slot %U is moved to %U due to private in font",unicode)
- -- end
- -- end
- -- end
- if not name then
- name = formatters["u%06X.ctx"](unicode)
- end
- unicodes[name] = unicode
- nofunicodes = nofunicodes + 1
- end
- indices[index] = unicode -- each index is unique (at least now)
- local description = {
- -- width = glyph.width,
- boundingbox = glyph.boundingbox,
- -- name = glyph.name or name or "unknown", -- uniXXXX
- name = name or "unknown", -- uniXXXX
- -- cidindex = cidindex,
- index = index,
- glyph = glyph,
- }
- descriptions[unicode] = description
- local altuni = glyph.altuni
- if altuni then
- -- local d
- for i=1,#altuni do
- local a = altuni[i]
- local u = a.unicode
- if u ~= unicode then
- local v = a.variant
- if v then
- -- tricky: no addition to d? needs checking but in practice such dups are either very simple
- -- shapes or e.g. cjk with not that many features
- local vv = variants[v]
- if vv then
- vv[u] = unicode
- else -- xits-math has some:
- vv = { [u] = unicode }
- variants[v] = vv
- end
- -- elseif d then
- -- d[#d+1] = u
- -- else
- -- d = { u }
- end
- end
- end
- -- if d then
- -- duplicates[unicode] = d -- is this needed ?
- -- end
- end
- end
- end
- else
- report_otf("potential problem: no glyphs found in subfont %i",cidindex)
- end
- end
- if trace_subfonts then
- report_otf("nofglyphs: %i, unique: %i",cidtotal,table.count(unique))
- end
- if trace_loading then
- report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
- end
- elseif trace_loading then
- report_otf("unable to remap cid font, missing cid file for %a",filename)
- end
- elseif trace_loading then
- report_otf("font %a has no glyphs",filename)
- end
-
- else
-
- local cnt = raw.glyphcnt or 0
- local min = raw.glyphmin or 0
- local max = raw.glyphmax or (cnt - 1)
- notdeffound = (tonumber(raw.table_version) or 0) > 0.4 and raw.notdef_loc or -1
- if cnt > 0 then
- for index=min,max do
- local glyph = rawglyphs[index]
- if glyph then
- local unicode = glyph.unicode
- local name = glyph.name
- if not unicode or unicode == -1 then -- or unicode >= criterium then
- unicode = private
- unicodes[name] = private
- if trace_private then
- report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
- end
- private = private + 1
- else
- -- We have a font that uses and exposes the private area. As this is rather unreliable it's
- -- advised not to trust slots here (better use glyphnames). Anyway, we need a double check:
- -- we need to move already moved entries and we also need to bump the next private to after
- -- the (currently) last slot. This could leave us with a hole but we have holes anyway.
- if unicode > criterium then
- -- \definedfont[file:HANBatang-LVT.ttf] \fontchar{uF0135} \char"F0135
- local taken = descriptions[unicode]
- if taken then
- if unicode >= private then
- private = unicode + 1 -- restart private (so we can have mixed now)
- else
- private = private + 1 -- move on
- end
- descriptions[private] = taken
- unicodes[taken.name] = private
- indices[taken.index] = private
- if trace_private then
- report_otf("slot %U is moved to %U due to private in font",unicode)
- end
- else
- if unicode >= private then
- private = unicode + 1 -- restart (so we can have mixed now)
- end
- end
- end
- unicodes[name] = unicode
- end
- indices[index] = unicode
- -- if not name then
- -- name = formatters["u%06X"](unicode) -- u%06X.ctx
- -- end
- descriptions[unicode] = {
- -- width = glyph.width,
- boundingbox = glyph.boundingbox,
- name = name,
- index = index,
- glyph = glyph,
- }
- local altuni = glyph.altuni
- if altuni then
- -- local d
- for i=1,#altuni do
- local a = altuni[i]
- local u = a.unicode
- if u ~= unicode then
- local v = a.variant
- if v then
- -- tricky: no addition to d? needs checking but in practice such dups are either very simple
- -- shapes or e.g. cjk with not that many features
- local vv = variants[v]
- if vv then
- vv[u] = unicode
- else -- xits-math has some:
- vv = { [u] = unicode }
- variants[v] = vv
- end
- -- elseif d then
- -- d[#d+1] = u
- -- else
- -- d = { u }
- end
- end
- end
- -- if d then
- -- duplicates[unicode] = d -- is this needed ?
- -- end
- end
- else
- report_otf("potential problem: glyph %U is used but empty",index)
- end
- end
- else
- report_otf("potential problem: no glyphs found")
- end
-
- end
-
- if notdeffound == -1 then
- report_otf("warning: no .notdef found in %a",filename)
- elseif notdeffound ~= 0 then
- report_otf("warning: .notdef found at position %a in %a",notdeffound,filename)
- end
- metadata.notdef = notdeffound
-
- resources.private = private
-
-end
-
--- the next one is still messy but will get better when we have
--- flattened map/enc tables in the font loader
-
--- the next one is not using a valid base for unicode privates
---
--- PsuedoEncodeUnencoded(EncMap *map,struct ttfinfo *info)
-
-local function enhance_check_encoding(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local properties = data.properties
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicodes
- local duplicates = resources.duplicates
-
- -- begin of messy (not needed when cidmap)
-
- local mapdata = raw.map or { }
- local unicodetoindex = mapdata and mapdata.map or { }
- local indextounicode = mapdata and mapdata.backmap or { }
- -- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "")
- local encname = lower(data.enc_name or mapdata.enc_name or "")
- local criterium = 0xFFFF -- for instance cambria has a lot of mess up there
- local privateoffset = constructors.privateoffset
-
- -- end of messy
-
- if find(encname,"unicode") then -- unicodebmp, unicodefull, ...
- if trace_loading then
- report_otf("checking embedded unicode map %a",encname)
- end
- local reported = { }
- -- we loop over the original unicode->index mapping but we
- -- need to keep in mind that that one can have weird entries
- -- so we need some extra checking
- for maybeunicode, index in next, unicodetoindex do
- if descriptions[maybeunicode] then
- -- we ignore invalid unicodes (unicode = -1) (ff can map wrong to non private)
- else
- local unicode = indices[index]
- if not unicode then
- -- weird (cjk or so?)
- elseif maybeunicode == unicode then
- -- no need to add
- elseif unicode > privateoffset then
- -- we have a non-unicode
- else
- local d = descriptions[unicode]
- if d then
- local c = d.copies
- if c then
- c[maybeunicode] = true
- else
- d.copies = { [maybeunicode] = true }
- end
- elseif index and not reported[index] then
- report_otf("missing index %i",index)
- reported[index] = true
- end
- end
- end
- end
- for unicode, data in next, descriptions do
- local d = data.copies
- if d then
- duplicates[unicode] = sortedkeys(d)
- data.copies = nil
- end
- end
- elseif properties.cidinfo then
- report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
- else
- report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
- end
-
- if mapdata then
- mapdata.map = { } -- clear some memory (virtual and created each time anyway)
- mapdata.backmap = { } -- clear some memory (virtual and created each time anyway)
- end
-end
-
--- for the moment we assume that a font with lookups will not use
--- altuni so we stick to kerns only .. alternatively we can always
--- do an indirect lookup uni_to_uni, but then we need that in
--- all lookups
-
-local function enhance_add_duplicates(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local properties = data.properties
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicodes
- local duplicates = resources.duplicates
- -- for unicode, d in next, duplicates do
- for unicode, d in table.sortedhash(duplicates) do -- nicer for log
- local nofduplicates = #d
- if nofduplicates > 4 then
- if trace_loading then
- report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
- end
- else
- -- local validduplicates = { }
- for i=1,nofduplicates do
- local u = d[i]
- if not descriptions[u] then
- local description = descriptions[unicode]
- local n = 0
- for _, description in next, descriptions do
- local kerns = description.kerns
- if kerns then
- for _, k in next, kerns do
- local ku = k[unicode]
- if ku then
- k[u] = ku
- n = n + 1
- end
- end
- end
- -- todo: lookups etc
- end
- if u > 0 then -- and
- local duplicate = copy(description) -- else packing problem
- duplicate.comment = formatters["copy of %U"](unicode)
- descriptions[u] = duplicate
- -- validduplicates[#validduplicates+1] = u
- if trace_loading then
- report_otf("duplicating %06U to %06U with index %H (%s kerns)",unicode,u,description.index,n)
- end
- end
- end
- end
- -- duplicates[unicode] = #validduplicates > 0 and validduplicates or nil
- end
- end
-end
-
--- class : nil base mark ligature component (maybe we don't need it in description)
--- boundingbox: split into ht/dp takes more memory (larger tables and less sharing)
-
-local function enhance_analyze_glyphs(data,filename,raw) -- maybe integrate this in the previous
- local descriptions = data.descriptions
- local resources = data.resources
- local metadata = data.metadata
- local properties = data.properties
- local hasitalics = false
- local widths = { }
- local marks = { } -- always present (saves checking)
- for unicode, description in next, descriptions do
- local glyph = description.glyph
- local italic = glyph.italic_correction -- only in a math font (we also have vert/horiz)
- if not italic then
- -- skip
- elseif italic == 0 then
- -- skip
- else
- description.italic = italic
- hasitalics = true
- end
- local width = glyph.width
- widths[width] = (widths[width] or 0) + 1
- local class = glyph.class
- if class then
- if class == "mark" then
- marks[unicode] = true
- end
- description.class = class
- end
- end
- -- flag italic
- properties.hasitalics = hasitalics
- -- flag marks
- resources.marks = marks
- -- share most common width for cjk fonts
- local wd, most = 0, 1
- for k,v in next, widths do
- if v > most then
- wd, most = k, v
- end
- end
- if most > 1000 then -- maybe 500
- if trace_loading then
- report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
- end
- for unicode, description in next, descriptions do
- if description.width == wd then
- -- description.width = nil
- else
- description.width = description.glyph.width
- end
- end
- resources.defaultwidth = wd
- else
- for unicode, description in next, descriptions do
- description.width = description.glyph.width
- end
- end
-end
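-
--- a small sketch of the width tally above (hypothetical numbers): with three glyphs of
--- widths 1000, 1000 and 600 the counter becomes { [1000] = 2, [600] = 1 }, so wd ends up
--- as 1000; only when the winner occurs more than 1000 times is it promoted to
--- resources.defaultwidth (the cjk case)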
-
-local function enhance_reorganize_mark_classes(data,filename,raw)
- local mark_classes = raw.mark_classes
- if mark_classes then
- local resources = data.resources
- local unicodes = resources.unicodes
- local markclasses = { }
- resources.markclasses = markclasses -- reversed
- for name, class in next, mark_classes do
- local t = { }
- for s in gmatch(class,"[^ ]+") do
- t[unicodes[s]] = true
- end
- markclasses[name] = t
- end
- end
-end
-
-local function enhance_reorganize_features(data,filename,raw) -- combine with other
- local features = { }
- data.resources.features = features
- for k=1,#otf.glists do
- local what = otf.glists[k]
- local dw = raw[what]
- if dw then
- local f = { }
- features[what] = f
- for i=1,#dw do
- local d= dw[i]
- local dfeatures = d.features
- if dfeatures then
- for i=1,#dfeatures do
- local df = dfeatures[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag]
- if not ft then
- ft = { }
- f[tag] = ft
- end
- local dscripts = df.scripts
- for i=1,#dscripts do
- local d = dscripts[i]
- local languages = d.langs
- local script = strip(lower(d.script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- end
- end
- end
- end
-end
-
-local function enhance_reorganize_anchor_classes(data,filename,raw)
- local resources = data.resources
- local anchor_to_lookup = { }
- local lookup_to_anchor = { }
- resources.anchor_to_lookup = anchor_to_lookup
- resources.lookup_to_anchor = lookup_to_anchor
- local classes = raw.anchor_classes -- anchor classes not in final table
- if classes then
- for c=1,#classes do
- local class = classes[c]
- local anchor = class.name
- local lookups = class.lookup
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- local a = anchor_to_lookup[anchor]
- if not a then
- a = { }
- anchor_to_lookup[anchor] = a
- end
- for l=1,#lookups do
- local lookup = lookups[l]
- local l = lookup_to_anchor[lookup]
- if l then
- l[anchor] = true
- else
- l = { [anchor] = true }
- lookup_to_anchor[lookup] = l
- end
- a[lookup] = true
- end
- end
- end
-end
-
--- local function checklookups(data,missing,nofmissing)
--- local resources = data.resources
--- local unicodes = resources.unicodes
--- local lookuptypes = resources.lookuptypes
--- if not unicodes or not lookuptypes then
--- return
--- elseif nofmissing <= 0 then
--- return
--- end
--- local descriptions = data.descriptions
--- local private = fonts.constructors and fonts.constructors.privateoffset or 0xF0000 -- 0x10FFFF
--- --
--- local ns, nl = 0, 0
---
--- local guess = { }
--- -- helper
--- local function check(gname,code,unicode)
--- local description = descriptions[code]
--- -- no need to add a self reference
--- local variant = description.name
--- if variant == gname then
--- return
--- end
--- -- the variant already has a unicode (normally that results in a default tounicode to self)
--- local unic = unicodes[variant]
--- if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
--- -- no default mapping and therefore maybe no tounicode yet
--- else
--- return
--- end
--- -- the variant already has a tounicode
--- if descriptions[code].unicode then
--- return
--- end
--- -- add to the list
--- local g = guess[variant]
--- -- local r = overloads[unicode]
--- -- if r then
--- -- unicode = r.unicode
--- -- end
--- if g then
--- g[gname] = unicode
--- else
--- guess[variant] = { [gname] = unicode }
--- end
--- end
--- --
--- for unicode, description in next, descriptions do
--- local slookups = description.slookups
--- if slookups then
--- local gname = description.name
--- for tag, data in next, slookups do
--- local lookuptype = lookuptypes[tag]
--- if lookuptype == "alternate" then
--- for i=1,#data do
--- check(gname,data[i],unicode)
--- end
--- elseif lookuptype == "substitution" then
--- check(gname,data,unicode)
--- end
--- end
--- end
--- local mlookups = description.mlookups
--- if mlookups then
--- local gname = description.name
--- for tag, list in next, mlookups do
--- local lookuptype = lookuptypes[tag]
--- if lookuptype == "alternate" then
--- for i=1,#list do
--- local data = list[i]
--- for i=1,#data do
--- check(gname,data[i],unicode)
--- end
--- end
--- elseif lookuptype == "substitution" then
--- for i=1,#list do
--- check(gname,list[i],unicode)
--- end
--- end
--- end
--- end
--- end
--- -- resolve references
--- local done = true
--- while done do
--- done = false
--- for k, v in next, guess do
--- if type(v) ~= "number" then
--- for kk, vv in next, v do
--- if vv == -1 or vv >= private or (vv >= 0xE000 and vv <= 0xF8FF) or vv == 0xFFFE or vv == 0xFFFF then
--- local uu = guess[kk]
--- if type(uu) == "number" then
--- guess[k] = uu
--- done = true
--- end
--- else
--- guess[k] = vv
--- done = true
--- end
--- end
--- end
--- end
--- end
--- -- wrap up
--- local orphans = 0
--- local guessed = 0
--- for k, v in next, guess do
--- if type(v) == "number" then
--- descriptions[unicodes[k]].unicode = descriptions[v].unicode or v -- can also be a table
--- guessed = guessed + 1
--- else
--- local t = nil
--- local l = lower(k)
--- local u = unicodes[l]
--- if not u then
--- orphans = orphans + 1
--- elseif u == -1 or u >= private or (u >= 0xE000 and u <= 0xF8FF) or u == 0xFFFE or u == 0xFFFF then
--- local unicode = descriptions[u].unicode
--- if unicode then
--- descriptions[unicodes[k]].unicode = unicode
--- guessed = guessed + 1
--- else
--- orphans = orphans + 1
--- end
--- else
--- orphans = orphans + 1
--- end
--- end
--- end
--- if trace_loading and orphans > 0 or guessed > 0 then
--- report_otf("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans)
--- end
--- end
-
-local function enhance_prepare_tounicode(data,filename,raw)
- fonts.mappings.addtounicode(data,filename)
-end
-
-local g_directions = {
- gsub_contextchain = 1,
- gpos_contextchain = 1,
- -- gsub_context = 1,
- -- gpos_context = 1,
- gsub_reversecontextchain = -1,
- gpos_reversecontextchain = -1,
-}
--- The following is no longer needed as AAT has been ignored since the end of October 2013.
---
--- -- Research by Khaled Hosny has demonstrated that the font loader merges
--- -- regular and AAT features and that these can interfere (especially because
--- -- we dropped checking for valid features elsewhere). So, we just check for
--- -- the special flag and drop the feature if such a tag is found.
---
--- local function supported(features)
--- for i=1,#features do
--- if features[i].ismac then
--- return false
--- end
--- end
--- return true
--- end
-
-local function enhance_reorganize_subtables(data,filename,raw)
- local resources = data.resources
- local sequences = { }
- local lookups = { }
- local chainedfeatures = { }
- resources.sequences = sequences
- resources.lookups = lookups -- we also have lookups in data itself
- for k=1,#otf.glists do
- local what = otf.glists[k]
- local dw = raw[what]
- if dw then
- for k=1,#dw do
- local gk = dw[k]
- local features = gk.features
- -- if not features or supported(features) then -- not always features !
- local typ = gk.type
- local chain = g_directions[typ] or 0
- local subtables = gk.subtables
- if subtables then
- local t = { }
- for s=1,#subtables do
- t[s] = subtables[s].name
- end
- subtables = t
- end
- local flags, markclass = gk.flags, nil
- if flags then
- local t = { -- forcing false packs nicer
- (flags.ignorecombiningmarks and "mark") or false,
- (flags.ignoreligatures and "ligature") or false,
- (flags.ignorebaseglyphs and "base") or false,
- flags.r2l or false,
- }
- markclass = flags.mark_class
- if markclass then
- markclass = resources.markclasses[markclass]
- end
- flags = t
- end
- --
- local name = gk.name
- --
- if not name then
- -- in fact an error
- report_otf("skipping weird lookup number %s",k)
- elseif features then
- -- scripts, tag, ismac
- local f = { }
- local o = { }
- for i=1,#features do
- local df = features[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag]
- if not ft then
- ft = { }
- f[tag] = ft
- o[#o+1] = tag
- end
- local dscripts = df.scripts
- for i=1,#dscripts do
- local d = dscripts[i]
- local languages = d.langs
- local script = strip(lower(d.script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- sequences[#sequences+1] = {
- type = typ,
- chain = chain,
- flags = flags,
- name = name,
- subtables = subtables,
- markclass = markclass,
- features = f,
- order = o,
- }
- else
- lookups[name] = {
- type = typ,
- chain = chain,
- flags = flags,
- subtables = subtables,
- markclass = markclass,
- }
- end
- -- end
- end
- end
- end
-end
-
-local function enhance_prepare_lookups(data,filename,raw)
- local lookups = raw.lookups
- if lookups then
- data.lookups = lookups
- end
-end
-
--- The reverse handler does a bit of redundant splitting but it's seldom
--- seen so we don't bother too much. We could store the replacement
--- in the current list (value instead of true) but it makes other code
--- uglier. Maybe some day.
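-
--- a tiny illustration (hypothetical glyph names): with unicodes = { f = 0x66, i = 0x69 }
--- a cover string like "f i" is split by the tounicodetable helper into { 0x66, 0x69 },
--- and t_hashed below then turns such lists into sets like { [0x66] = true, [0x69] = true }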
-
-local function t_uncover(splitter,cache,covers)
- local result = { }
- for n=1,#covers do
- local cover = covers[n]
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
- cache[cover] = uncovered
- end
- result[n] = uncovered
- end
- return result
-end
-
-local function s_uncover(splitter,cache,cover)
- if cover == "" then
- return nil
- else
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
- -- for i=1,#uncovered do
- -- uncovered[i] = { [uncovered[i]] = true }
- -- end
- cache[cover] = uncovered
- end
- return { uncovered }
- end
-end
-
-local function t_hashed(t,cache)
- if t then
- local ht = { }
- for i=1,#t do
- local ti = t[i]
- local tih = cache[ti]
- if not tih then
- local tn = #ti
- if tn == 1 then
- tih = { [ti[1]] = true }
- else
- tih = { }
- for i=1,tn do
- tih[ti[i]] = true
- end
- end
- cache[ti] = tih
- end
- ht[i] = tih
- end
- return ht
- else
- return nil
- end
-end
-
--- local s_hashed = t_hashed
-
-local function s_hashed(t,cache)
- if t then
- local tf = t[1]
- local nf = #tf
- if nf == 1 then
- return { [tf[1]] = true }
- else
- local ht = { }
- for i=1,nf do
- ht[i] = { [tf[i]] = true }
- end
- return ht
- end
- else
- return nil
- end
-end
-
-local function r_uncover(splitter,cache,cover,replacements)
- if cover == "" then
- return nil
- else
- -- we always have current as { } even in the case of one
- local uncovered = cover[1]
- local replaced = cache[replacements]
- if not replaced then
- replaced = lpegmatch(splitter,replacements)
- cache[replacements] = replaced
- end
- local nu, nr = #uncovered, #replaced
- local r = { }
- if nu == nr then
- for i=1,nu do
- r[uncovered[i]] = replaced[i]
- end
- end
- return r
- end
-end
-
-local function enhance_reorganize_lookups(data,filename,raw) -- we could check for "" and n == 0
- -- we prefer the before lookups in a normal order
- if data.lookups then
- local helpers = data.helpers
- local duplicates = data.resources.duplicates
- local splitter = helpers.tounicodetable
- local t_u_cache = { }
- local s_u_cache = t_u_cache -- string keys
- local t_h_cache = { }
- local s_h_cache = t_h_cache -- table keys (so we could use one cache)
- local r_u_cache = { } -- maybe shared
- helpers.matchcache = t_h_cache -- so that we can add duplicates
- --
- for _, lookup in next, data.lookups do
- local rules = lookup.rules
- if rules then
- local format = lookup.format
- if format == "class" then
- local before_class = lookup.before_class
- if before_class then
- before_class = t_uncover(splitter,t_u_cache,reversed(before_class))
- end
- local current_class = lookup.current_class
- if current_class then
- current_class = t_uncover(splitter,t_u_cache,current_class)
- end
- local after_class = lookup.after_class
- if after_class then
- after_class = t_uncover(splitter,t_u_cache,after_class)
- end
- for i=1,#rules do
- local rule = rules[i]
- local class = rule.class
- local before = class.before
- if before then
- for i=1,#before do
- before[i] = before_class[before[i]] or { }
- end
- rule.before = t_hashed(before,t_h_cache)
- end
- local current = class.current
- local lookups = rule.lookups
- if current then
- for i=1,#current do
- current[i] = current_class[current[i]] or { }
- -- let's not be sparse
- if lookups and not lookups[i] then
- lookups[i] = "" -- (was: false) e.g. we can have two lookups and one replacement
- end
- -- end of fix
- end
- rule.current = t_hashed(current,t_h_cache)
- end
- local after = class.after
- if after then
- for i=1,#after do
- after[i] = after_class[after[i]] or { }
- end
- rule.after = t_hashed(after,t_h_cache)
- end
- rule.class = nil
- end
- lookup.before_class = nil
- lookup.current_class = nil
- lookup.after_class = nil
- lookup.format = "coverage"
- elseif format == "coverage" then
- for i=1,#rules do
- local rule = rules[i]
- local coverage = rule.coverage
- if coverage then
- local before = coverage.before
- if before then
- before = t_uncover(splitter,t_u_cache,reversed(before))
- rule.before = t_hashed(before,t_h_cache)
- end
- local current = coverage.current
- if current then
- current = t_uncover(splitter,t_u_cache,current)
- -- let's not be sparse
- local lookups = rule.lookups
- if lookups then
- for i=1,#current do
- if not lookups[i] then
- lookups[i] = "" -- fix sparse array
- end
- end
- end
- --
- rule.current = t_hashed(current,t_h_cache)
- end
- local after = coverage.after
- if after then
- after = t_uncover(splitter,t_u_cache,after)
- rule.after = t_hashed(after,t_h_cache)
- end
- rule.coverage = nil
- end
- end
- elseif format == "reversecoverage" then -- special case, single substitution only
- for i=1,#rules do
- local rule = rules[i]
- local reversecoverage = rule.reversecoverage
- if reversecoverage then
- local before = reversecoverage.before
- if before then
- before = t_uncover(splitter,t_u_cache,reversed(before))
- rule.before = t_hashed(before,t_h_cache)
- end
- local current = reversecoverage.current
- if current then
- current = t_uncover(splitter,t_u_cache,current)
- rule.current = t_hashed(current,t_h_cache)
- end
- local after = reversecoverage.after
- if after then
- after = t_uncover(splitter,t_u_cache,after)
- rule.after = t_hashed(after,t_h_cache)
- end
- local replacements = reversecoverage.replacements
- if replacements then
- rule.replacements = r_uncover(splitter,r_u_cache,current,replacements)
- end
- rule.reversecoverage = nil
- end
- end
- elseif format == "glyphs" then
- -- I could store these more efficiently (as now we use nested tables for before,
- -- after and current) but this feature happens so seldom that I don't bother
- -- about it right now.
- for i=1,#rules do
- local rule = rules[i]
- local glyphs = rule.glyphs
- if glyphs then
- local fore = glyphs.fore
- if fore and fore ~= "" then
- fore = s_uncover(splitter,s_u_cache,fore)
- rule.after = s_hashed(fore,s_h_cache)
- end
- local back = glyphs.back
- if back then
- back = s_uncover(splitter,s_u_cache,back)
- rule.before = s_hashed(back,s_h_cache)
- end
- local names = glyphs.names
- if names then
- names = s_uncover(splitter,s_u_cache,names)
- rule.current = s_hashed(names,s_h_cache)
- end
- rule.glyphs = nil
- local lookups = rule.lookups
- if lookups then
- for i=1,#names do
- if not lookups[i] then
- lookups[i] = "" -- fix sparse array
- end
- end
- end
- end
- end
- end
- end
- end
- end
-end
-
-local function enhance_expand_lookups(data,filename,raw) -- we could check for "" and n == 0
- if data.lookups then
- local cache = data.helpers.matchcache
- if cache then
- local duplicates = data.resources.duplicates
- for key, hash in next, cache do
- local done = nil
- for key in next, hash do
- local unicode = duplicates[key]
- if not unicode then
- -- no duplicate
- elseif type(unicode) == "table" then
- -- multiple duplicates
- for i=1,#unicode do
- local u = unicode[i]
- if hash[u] then
- -- already in set
- elseif done then
- done[u] = key
- else
- done = { [u] = key }
- end
- end
- else
- -- one duplicate
- if hash[unicode] then
- -- already in set
- elseif done then
- done[unicode] = key
- else
- done = { [unicode] = key }
- end
- end
- end
- if done then
- for u in next, done do
- hash[u] = true
- end
- end
- end
- end
- end
-end
-
-local function check_variants(unicode,the_variants,splitter,unicodes)
- local variants = the_variants.variants
- if variants then -- use splitter
- local glyphs = lpegmatch(splitter,variants)
- local done = { [unicode] = true }
- local n = 0
- for i=1,#glyphs do
- local g = glyphs[i]
- if done[g] then
- if i > 1 then
- report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
- end
- else
- if n == 0 then
- n = 1
- variants = { g }
- else
- n = n + 1
- variants[n] = g
- end
- done[g] = true
- end
- end
- if n == 0 then
- variants = nil
- end
- end
- local parts = the_variants.parts
- if parts then
- local p = #parts
- if p > 0 then
- for i=1,p do
- local pi = parts[i]
- pi.glyph = unicodes[pi.component] or 0
- pi.component = nil
- end
- else
- parts = nil
- end
- end
- local italic = the_variants.italic
- if italic and italic == 0 then
- italic = nil
- end
- return variants, parts, italic
-end
-
-local function enhance_analyze_math(data,filename,raw)
- if raw.math then
- data.metadata.math = raw.math
- local unicodes = data.resources.unicodes
- local splitter = data.helpers.tounicodetable
- for unicode, description in next, data.descriptions do
- local glyph = description.glyph
- local mathkerns = glyph.mathkern -- singular
- local hvariants = glyph.horiz_variants
- local vvariants = glyph.vert_variants
- local accent = glyph.top_accent
- local italic = glyph.italic_correction
- if mathkerns or hvariants or vvariants or accent or italic then
- local math = { }
- if accent then
- math.accent = accent
- end
- if mathkerns then
- local topright = mathkerns.top_right
- local topleft = mathkerns.top_left
- local bottomright = mathkerns.bottom_right
- local bottomleft = mathkerns.bottom_left
- math.kerns = {
- topright = topright and next(topright) and topright or nil,
- topleft = topleft and next(topleft) and topleft or nil,
- bottomright = bottomright and next(bottomright) and bottomright or nil,
- bottomleft = bottomleft and next(bottomleft) and bottomleft or nil,
- }
- end
- if hvariants then
- math.hvariants, math.hparts, math.hitalic = check_variants(unicode,hvariants,splitter,unicodes)
- end
- if vvariants then
- math.vvariants, math.vparts, math.vitalic = check_variants(unicode,vvariants,splitter,unicodes)
- end
- if italic and italic ~= 0 then
- math.italic = italic
- end
- description.math = math
- end
- end
- end
-end
-
-local function enhance_reorganize_glyph_kerns(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local unicodes = resources.unicodes
- for unicode, description in next, descriptions do
- local kerns = description.glyph.kerns
- if kerns then
- local newkerns = { }
- for k, kern in next, kerns do
- local name = kern.char
- local offset = kern.off
- local lookup = kern.lookup
- if name and offset and lookup then
- local unicode = unicodes[name]
- if unicode then
- if type(lookup) == "table" then
- for l=1,#lookup do
- local lookup = lookup[l]
- local lookupkerns = newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode] = offset
- else
- newkerns[lookup] = { [unicode] = offset }
- end
- end
- else
- local lookupkerns = newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode] = offset
- else
- newkerns[lookup] = { [unicode] = offset }
- end
- end
- elseif trace_loading then
- report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
- end
- end
- end
- description.kerns = newkerns
- end
- end
-end
-
-local function enhance_merge_kern_classes(data,filename,raw)
- local gposlist = raw.gpos
- if gposlist then
- local descriptions = data.descriptions
- local resources = data.resources
- local unicodes = resources.unicodes
- local splitter = data.helpers.tounicodetable
- local ignored = 0
- local blocked = 0
- for gp=1,#gposlist do
- local gpos = gposlist[gp]
- local subtables = gpos.subtables
- if subtables then
- local first_done = { } -- could become an option so that we can deal with buggy fonts that don't get fixed
- local split = { } -- saves time .. although probably not that much any more in the fixed luatex kernclass table
- for s=1,#subtables do
- local subtable = subtables[s]
- local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
- local lookup = subtable.lookup or subtable.name
- if kernclass then -- the next one is quite slow
- -- as far as i can see the kernclass is a table with one entry and offsets
- -- have no [1] so we could remove one level (kernclass) and start offsets
- -- at 1, but we're too far down the road now to fix that
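- -- for illustration (an assumed shape, not data from a specific font): such a
- -- kernclass subtable roughly looks like
- --
- --   { {
- --     firsts  = { "a o e", "T V W" },         -- space separated glyph names
- --     seconds = { [2] = "a o", [3] = "y" },   -- may start at 2
- --     offsets = { ... },                      -- #firsts * maxseconds values
- --     lookup  = "pp_l_0_s",                   -- hypothetical lookup name
- --   } }
- --
- -- and the offset between first class fk and second class sk then sits at
- -- offsets[(fk-1)*maxseconds + sk], which is what the loop below picks up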
- if #kernclass > 0 then
- kernclass = kernclass[1]
- lookup = type(kernclass.lookup) == "string" and kernclass.lookup or lookup
- report_otf("fixing kernclass table of lookup %a",lookup)
- end
- local firsts = kernclass.firsts
- local seconds = kernclass.seconds
- local offsets = kernclass.offsets
- -- if offsets[1] == nil then
- -- offsets[1] = "" -- defaults ?
- -- end
- for n, s in next, firsts do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- local maxseconds = 0
- for n, s in next, seconds do
- if n > maxseconds then
- maxseconds = n
- end
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- for fk=1,#firsts do -- maxfirsts ?
- local fv = firsts[fk]
- local splt = split[fv]
- if splt then
- local extrakerns = { }
- local baseoffset = (fk-1) * maxseconds
- -- for sk, sv in next, seconds do
- for sk=2,maxseconds do
- local sv = seconds[sk]
- if sv then
- local splt = split[sv]
- if splt then -- redundant test
- local offset = offsets[baseoffset + sk]
- if offset then
- for i=1,#splt do
- extrakerns[splt[i]] = offset
- end
- end
- end
- end
- end
- for i=1,#splt do
- local first_unicode = splt[i]
- if first_done[first_unicode] then
- report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode)
- blocked = blocked + 1
- else
- first_done[first_unicode] = true
- local description = descriptions[first_unicode]
- if description then
- local kerns = description.kerns
- if not kerns then
- kerns = { } -- unicode indexed !
- description.kerns = kerns
- end
- local lookupkerns = kerns[lookup]
- if not lookupkerns then
- lookupkerns = { }
- kerns[lookup] = lookupkerns
- end
- if overloadkerns then
- for second_unicode, kern in next, extrakerns do
- lookupkerns[second_unicode] = kern
- end
- else
- for second_unicode, kern in next, extrakerns do
- local k = lookupkerns[second_unicode]
- if not k then
- lookupkerns[second_unicode] = kern
- elseif k ~= kern then
- if trace_loading then
- report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern)
- end
- ignored = ignored + 1
- end
- end
- end
- elseif trace_loading then
- report_otf("no glyph data for %U", first_unicode)
- end
- end
- end
- end
- end
- subtable.kernclass = { }
- end
- end
- end
- end
- if ignored > 0 then
- report_otf("%s kern overloads ignored",ignored)
- end
- if blocked > 0 then
- report_otf("%s successive kerns blocked",blocked)
- end
- end
-end
-
-local function enhance_check_glyphs(data,filename,raw)
- for unicode, description in next, data.descriptions do
- description.glyph = nil
- end
-end
-
--- future versions will remove _
-
-local valid = (R("\x00\x7E") - S("(){}[]<>%/ \n\r\f\v"))^0 * P(-1)
-
-local function valid_ps_name(str)
- return str and str ~= "" and #str < 64 and lpegmatch(valid,str) and true or false
-end
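-
--- a quick sanity sketch of what the pattern above accepts (the names are just
--- examples): plain ascii without delimiters or spaces passes, the rest fails
---
--- valid_ps_name("LMRoman10-Regular")  -- true
--- valid_ps_name("My Font (Bold)")     -- false, space and parentheses
--- valid_ps_name(string.rep("x",64))   -- false, too long (at most 63 characters)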
-
-local function enhance_check_metadata(data,filename,raw)
- local metadata = data.metadata
- for _, k in next, mainfields do
- if valid_fields[k] then
- local v = raw[k]
- if not metadata[k] then
- metadata[k] = v
- end
- end
- end
- -- metadata.pfminfo = raw.pfminfo -- not already done?
- local ttftables = metadata.ttf_tables
- if ttftables then
- for i=1,#ttftables do
- ttftables[i].data = "deleted"
- end
- end
- --
- local state = metadata.validation_state
- local names = raw.names
- --
- if state and table.contains(state,"bad_ps_fontname") then
- -- the ff library does a bit too much (and wrong) checking ... so we need to catch this
- -- at least for now
- local function valid(what)
- if names then
- for i=1,#names do
- local list = names[i]
- local names = list.names
- if names then
- local name = names[what]
- if name and valid_ps_name(name) then
- return name
- end
- end
- end
- end
- end
- local function check(what)
- local oldname = metadata[what]
- if valid_ps_name(oldname) then
- report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname)
- else
- local newname = valid(what)
- if not newname then
- newname = formatters["bad-%s-%s"](what,file.nameonly(filename))
- end
- local warning = formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname)
- data.warnings[#data.warnings+1] = warning
- report_otf(warning)
- metadata[what] = newname
- end
- end
- check("fontname")
- check("fullname")
- end
- --
- if names then
- local psname = metadata.psname
- if not psname or psname == "" then
- for i=1,#names do
- local name = names[i]
- -- Currently we use the same restricted search as in the new context (specific) font loader
- -- but we might add more lang checks (it worked ok in the new loader so now we're in sync)
- -- This check here is also because there are (esp) cjk fonts out there with psnames different
- -- from fontnames (gives a bad lookup in backend).
- if lower(name.lang) == "english (us)" then
- local specification = name.names
- if specification then
- local postscriptname = specification.postscriptname
- if postscriptname then
- psname = postscriptname
- end
- end
- end
- break
- end
- end
- if psname ~= metadata.fontname then
- report_otf("fontname %a, fullname %a, psname %a",metadata.fontname,metadata.fullname,psname)
- end
- metadata.psname = psname
- end
- --
- if state and table.contains(state,"bad_cmap_table") then
- report_otf("fontfile %a has bad cmap tables",filename)
- end
-end
-
-local function enhance_cleanup_tables(data,filename,raw)
- local duplicates = data.resources.duplicates
- if duplicates then
- for k, v in next, duplicates do
- if #v == 1 then
- duplicates[k] = v[1]
- end
- end
- end
- data.resources.indices = nil -- not needed
- data.resources.unicodes = nil -- delayed
- data.helpers = nil -- tricky as we have no unicodes any more
-end
-
--- kern: ttf has a table with kerns
---
--- Weird, as maxfirst and maxseconds can have holes: first seems to be indexed, but
--- seconds can start at 2 .. this needs to be fixed as getn as well as # are sort of
--- unpredictable; alternatively we could force a [1] if not set (maybe I will do that
--- anyway).
-
--- we can share { } as it is never set
-
--- ligatures have an extra specification.char entry that we don't use
-
--- mlookups only with pairs and ligatures
-
-local function enhance_reorganize_glyph_lookups(data,filename,raw)
- local resources = data.resources
- local unicodes = resources.unicodes
- local descriptions = data.descriptions
- local splitter = data.helpers.tounicodelist
-
- local lookuptypes = resources.lookuptypes
-
- for unicode, description in next, descriptions do
- local lookups = description.glyph.lookups
- if lookups then
- for tag, lookuplist in next, lookups do
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local specification = lookup.specification
- local lookuptype = lookup.type
- local lt = lookuptypes[tag]
- if not lt then
- lookuptypes[tag] = lookuptype
- elseif lt ~= lookuptype then
- report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
- end
- if lookuptype == "ligature" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "alternate" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "substitution" then
- lookuplist[l] = unicodes[specification.variant]
- elseif lookuptype == "multiple" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "position" then
- lookuplist[l] = {
- specification.x or 0,
- specification.y or 0,
- specification.h or 0,
- specification.v or 0
- }
- elseif lookuptype == "pair" then
- local one = specification.offsets[1]
- local two = specification.offsets[2]
- local paired = unicodes[specification.paired]
- if one then
- if two then
- lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } }
- else
- lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } }
- end
- else
- if two then
- lookuplist[l] = { paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { }
- else
- lookuplist[l] = { paired }
- end
- end
- end
- end
- end
- local slookups, mlookups
- for tag, lookuplist in next, lookups do
- if #lookuplist == 1 then
- if slookups then
- slookups[tag] = lookuplist[1]
- else
- slookups = { [tag] = lookuplist[1] }
- end
- else
- if mlookups then
- mlookups[tag] = lookuplist
- else
- mlookups = { [tag] = lookuplist }
- end
- end
- end
- if slookups then
- description.slookups = slookups
- end
- if mlookups then
- description.mlookups = mlookups
- end
- -- description.lookups = nil
- end
- end
-end
-
-local zero = { 0, 0 }
-
-local function enhance_reorganize_glyph_anchors(data,filename,raw)
- local descriptions = data.descriptions
- for unicode, description in next, descriptions do
- local anchors = description.glyph.anchors
- if anchors then
- for class, data in next, anchors do
- if class == "baselig" then
- for tag, specification in next, data do
- -- for i=1,#specification do
- -- local si = specification[i]
- -- specification[i] = { si.x or 0, si.y or 0 }
- -- end
- -- can be sparse so we need to fill the holes
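- -- for example (made up numbers): { [1] = { x = 10, y = 5 }, [3] = { y = 8 } }
- -- ends up as { { 10, 5 }, { 0, 0 }, { 0, 8 } }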
- local n = 0
- for k, v in next, specification do
- if k > n then
- n = k
- end
- local x, y = v.x, v.y
- if x or y then
- specification[k] = { x or 0, y or 0 }
- else
- specification[k] = zero
- end
- end
- local t = { }
- for i=1,n do
- t[i] = specification[i] or zero
- end
- data[tag] = t -- so # is okay (nicer for packer)
- end
- else
- for tag, specification in next, data do
- local x, y = specification.x, specification.y
- if x or y then
- data[tag] = { x or 0, y or 0 }
- else
- data[tag] = zero
- end
- end
- end
- end
- description.anchors = anchors
- end
- end
-end
-
-local bogusname = (P("uni") + P("u")) * R("AF","09")^4
- + (P("index") + P("glyph") + S("Ii") * P("dentity") * P(".")^0) * R("09")^1
-local uselessname = (1-bogusname)^0 * bogusname
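-
--- for illustration, some typical generated names that match (and thus can be
--- purged): "uniFB01", "u1F600", "glyph123", "index42", "Identity.1", while
--- meaningful names like "f_f_i" or "Aacute" are kept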
-
-local function enhance_purge_names(data,filename,raw) -- not used yet
- if purge_names then
- local n = 0
- for u, d in next, data.descriptions do
- if lpegmatch(uselessname,d.name) then
- n = n + 1
- d.name = nil
- end
- -- d.comment = nil
- end
- if n > 0 then
- report_otf("%s bogus names removed",n)
- end
- end
-end
-
-local function enhance_compact_lookups(data,filename,raw)
- if not compact_lookups then
- report_otf("not compacting")
- return
- end
- -- create keyhash
- local last = 0
- local tags = table.setmetatableindex({ },
- function(t,k)
- last = last + 1
- t[k] = last
- return last
- end
- )
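- -- the index metamethod numbers tags on first use, so (hypothetical tags)
- -- tags["ss_latn_l_1"] --> 1, tags["ks_latn_l_5"] --> 2, and asking for
- -- "ss_latn_l_1" again keeps returning 1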
- --
- local descriptions = data.descriptions
- local resources = data.resources
- --
- for u, d in next, descriptions do
- --
- -- -- we can also compact anchors and cursives (basechar basemark baselig mark)
- --
- local slookups = d.slookups
- if type(slookups) == "table" then
- local s = { }
- for k, v in next, slookups do
- s[tags[k]] = v
- end
- d.slookups = s
- end
- --
- local mlookups = d.mlookups
- if type(mlookups) == "table" then
- local m = { }
- for k, v in next, mlookups do
- m[tags[k]] = v
- end
- d.mlookups = m
- end
- --
- local kerns = d.kerns
- if type(kerns) == "table" then
- local t = { }
- for k, v in next, kerns do
- t[tags[k]] = v
- end
- d.kerns = t
- end
- end
- --
- local lookups = data.lookups
- if lookups then
- local l = { }
- for k, v in next, lookups do
- local rules = v.rules
- if rules then
- for i=1,#rules do
- local l = rules[i].lookups
- if type(l) == "table" then
- for i=1,#l do
- l[i] = tags[l[i]]
- end
- end
- end
- end
- l[tags[k]] = v
- end
- data.lookups = l
- end
- --
- local lookups = resources.lookups
- if lookups then
- local l = { }
- for k, v in next, lookups do
- local s = v.subtables
- if type(s) == "table" then
- for i=1,#s do
- s[i] = tags[s[i]]
- end
- end
- l[tags[k]] = v
- end
- resources.lookups = l
- end
- --
- local sequences = resources.sequences
- if sequences then
- for i=1,#sequences do
- local s = sequences[i]
- local n = s.name
- if n then
- s.name = tags[n]
- end
- local t = s.subtables
- if type(t) == "table" then
- for i=1,#t do
- t[i] = tags[t[i]]
- end
- end
- end
- end
- --
- local lookuptypes = resources.lookuptypes
- if lookuptypes then
- local l = { }
- for k, v in next, lookuptypes do
- l[tags[k]] = v
- end
- resources.lookuptypes = l
- end
- --
- local anchor_to_lookup = resources.anchor_to_lookup
- if anchor_to_lookup then
- for anchor, lookups in next, anchor_to_lookup do
- local l = { }
- for lookup, value in next, lookups do
- l[tags[lookup]] = value
- end
- anchor_to_lookup[anchor] = l
- end
- end
- --
- local lookup_to_anchor = resources.lookup_to_anchor
- if lookup_to_anchor then
- local l = { }
- for lookup, value in next, lookup_to_anchor do
- l[tags[lookup]] = value
- end
- resources.lookup_to_anchor = l
- end
- --
- tags = table.swapped(tags)
- --
- report_otf("%s lookup tags compacted",#tags)
- --
- resources.lookuptags = tags
-end
-
--- modes: node, base, none
-
-function otf.setfeatures(tfmdata,features)
- local okay = constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
- if okay then
- return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
- else
- return { } -- will become false
- end
-end
-
--- the first version made a top/mid/bot extensible table, now we just
--- pass on the variants data and deal with it in the tfm scaler (there
--- is no longer an extensible table anyway)
---
--- we cannot share descriptions as virtual fonts might extend them (ok,
--- we could use a cache with a hash)
---
--- we already assign an empty table to characters as we can add for
--- instance protruding info and loop over characters; one is not supposed
--- to change descriptions and if one does so one should make a copy!
-
-local function copytotfm(data,cache_id)
- if data then
- local metadata = data.metadata
- local warnings = data.warnings
- local resources = data.resources
- local properties = derivetable(data.properties)
- local descriptions = derivetable(data.descriptions)
- local goodies = derivetable(data.goodies)
- local characters = { }
- local parameters = { }
- local mathparameters = { }
- --
- local pfminfo = metadata.pfminfo or { }
- local resources = data.resources
- local unicodes = resources.unicodes
- -- local mode = data.mode or "base"
- local spaceunits = 500
- local spacer = "space"
- local designsize = metadata.designsize or metadata.design_size or 100
- local minsize = metadata.minsize or metadata.design_range_bottom or designsize
- local maxsize = metadata.maxsize or metadata.design_range_top or designsize
- local mathspecs = metadata.math
- --
- if designsize == 0 then
- designsize = 100
- minsize = 100
- maxsize = 100
- end
- if mathspecs then
- for name, value in next, mathspecs do
- mathparameters[name] = value
- end
- end
- for unicode, _ in next, data.descriptions do -- use parent table
- characters[unicode] = { }
- end
- if mathspecs then
- -- we could move this to the scaler but not that much is saved
- -- and this is cleaner
- for unicode, character in next, characters do
- local d = descriptions[unicode]
- local m = d.math
- if m then
- -- watch out: luatex uses horiz_variants for the parts
- --
- local italic = m.italic
- local vitalic = m.vitalic
- --
- local variants = m.hvariants
- local parts = m.hparts
- -- local done = { [unicode] = true }
- if variants then
- local c = character
- for i=1,#variants do
- local un = variants[i]
- -- if done[un] then
- -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode)
- -- else
- c.next = un
- c = characters[un]
- -- done[un] = true
- -- end
- end -- c is now last in chain
- c.horiz_variants = parts
- elseif parts then
- character.horiz_variants = parts
- italic = m.hitalic
- end
- --
- local variants = m.vvariants
- local parts = m.vparts
- -- local done = { [unicode] = true }
- if variants then
- local c = character
- for i=1,#variants do
- local un = variants[i]
- -- if done[un] then
- -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode)
- -- else
- c.next = un
- c = characters[un]
- -- done[un] = true
- -- end
- end -- c is now last in chain
- c.vert_variants = parts
- elseif parts then
- character.vert_variants = parts
- end
- --
- if italic and italic ~= 0 then
- character.italic = italic -- overload
- end
- if vitalic and vitalic ~= 0 then
- character.vert_italic = vitalic
- end
- --
- local accent = m.accent
- if accent then
- character.accent = accent
- end
- --
- local kerns = m.kerns
- if kerns then
- character.mathkerns = kerns
- end
- end
- end
- end
- -- end math
- -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
- local filename = constructors.checkedfilename(resources)
- local fontname = metadata.fontname
- local fullname = metadata.fullname or fontname
- local psname = metadata.psname or fontname or fullname
- local units = metadata.units or metadata.units_per_em or 1000
- --
- if units == 0 then -- catch bugs in fonts
- units = 1000 -- maybe 2000 when ttf
- metadata.units = 1000
- report_otf("changing %a units to %a",0,units)
- end
- --
- local monospaced = metadata.monospaced or metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced")
- local charwidth = pfminfo.avgwidth -- or unset
- local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight
--- charwidth = charwidth * units/1000
--- charxheight = charxheight * units/1000
- local italicangle = metadata.italicangle
- properties.monospaced = monospaced
- parameters.italicangle = italicangle
- parameters.charwidth = charwidth
- parameters.charxheight = charxheight
- --
- local space = 0x0020
- local emdash = 0x2014
- if monospaced then
- if descriptions[space] then
- spaceunits, spacer = descriptions[space].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width, "emdash"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- else
- if descriptions[space] then
- spaceunits, spacer = descriptions[space].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width/2, "emdash/2"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- end
- spaceunits = tonumber(spaceunits) or units/2
- --
- parameters.slant = 0
- parameters.space = spaceunits -- 3.333 (cmr10)
- parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10)
- parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10)
- parameters.x_height = 2*units/5 -- 400
- parameters.quad = units -- 1000
- if spaceunits < 2*units/5 then
- -- todo: warning
- end
- if italicangle and italicangle ~= 0 then
- parameters.italicangle = italicangle
- parameters.italicfactor = math.cos(math.rad(90+italicangle))
- parameters.slant = - math.tan(italicangle*math.pi/180)
- end
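- -- for example: an italicangle of -12 degrees results in a slant of
- -- -tan(-12*pi/180), roughly 0.21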
- if monospaced then
- parameters.space_stretch = 0
- parameters.space_shrink = 0
- elseif syncspace then --
- parameters.space_stretch = spaceunits/2
- parameters.space_shrink = spaceunits/3
- end
- parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10)
- if charxheight then
- parameters.x_height = charxheight
- else
- local x = 0x0078
- if x then
- local x = descriptions[x]
- if x then
- parameters.x_height = x.height
- end
- end
- end
- --
- parameters.designsize = (designsize/10)*65536
- parameters.minsize = (minsize /10)*65536
- parameters.maxsize = (maxsize /10)*65536
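- -- these values come in decipoints, so a stored design size of 100 means 10pt,
- -- which becomes 10 * 65536 = 655360 scaled points here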
- parameters.ascender = abs(metadata.ascender or metadata.ascent or 0)
- parameters.descender = abs(metadata.descender or metadata.descent or 0)
- parameters.units = units
- --
- properties.space = spacer
- properties.encodingbytes = 2
- properties.format = data.format or otf_format(filename) or formats.otf
- properties.noglyphnames = true
- properties.filename = filename
- properties.fontname = fontname
- properties.fullname = fullname
- properties.psname = psname
- properties.name = filename or fullname
- --
- -- properties.name = specification.name
- -- properties.sub = specification.sub
- --
- if warnings and #warnings > 0 then
- report_otf("warnings for font: %s",filename)
- report_otf()
- for i=1,#warnings do
- report_otf(" %s",warnings[i])
- end
- report_otf()
- end
- return {
- characters = characters,
- descriptions = descriptions,
- parameters = parameters,
- mathparameters = mathparameters,
- resources = resources,
- properties = properties,
- goodies = goodies,
- warnings = warnings,
- }
- end
-end
-
-local function otftotfm(specification)
- local cache_id = specification.hash
- local tfmdata = containers.read(constructors.cache,cache_id)
- if not tfmdata then
- local name = specification.name
- local sub = specification.sub
- local filename = specification.filename
- -- local format = specification.format
- local features = specification.features.normal
- local rawdata = otf.load(filename,sub,features and features.featurefile)
- if rawdata and next(rawdata) then
- local descriptions = rawdata.descriptions
- local duplicates = rawdata.resources.duplicates
- if duplicates then
- local nofduplicates, nofduplicated = 0, 0
- for parent, list in next, duplicates do
- if type(list) == "table" then
- local n = #list
- for i=1,n do
- local unicode = list[i]
- if not descriptions[unicode] then
- descriptions[unicode] = descriptions[parent] -- or copy
- nofduplicated = nofduplicated + 1
- end
- end
- nofduplicates = nofduplicates + n
- else
- if not descriptions[list] then
- descriptions[list] = descriptions[parent] -- or copy
- nofduplicated = nofduplicated + 1
- end
- nofduplicates = nofduplicates + 1
- end
- end
- if trace_otf and nofduplicated ~= nofduplicates then
- report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
- end
- end
- rawdata.lookuphash = { }
- tfmdata = copytotfm(rawdata,cache_id)
- if tfmdata and next(tfmdata) then
- -- at this moment no characters are assigned yet, only empty slots
- local features = constructors.checkedfeatures("otf",features)
- local shared = tfmdata.shared
- if not shared then
- shared = { }
- tfmdata.shared = shared
- end
- shared.rawdata = rawdata
- -- shared.features = features -- default
- shared.dynamics = { }
- -- shared.processes = { }
- tfmdata.changed = { }
- shared.features = features
- shared.processes = otf.setfeatures(tfmdata,features)
- end
- end
- containers.write(constructors.cache,cache_id,tfmdata)
- end
- return tfmdata
-end
-
-local function read_from_otf(specification)
- local tfmdata = otftotfm(specification)
- if tfmdata then
- -- this late ? .. needs checking
- tfmdata.properties.name = specification.name
- tfmdata.properties.sub = specification.sub
- --
- tfmdata = constructors.scale(tfmdata,specification)
- local allfeatures = tfmdata.shared.features or specification.features.normal
- constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
- constructors.setname(tfmdata,specification) -- only otf?
- fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
- end
- return tfmdata
-end
-
-local function checkmathsize(tfmdata,mathsize)
- local mathdata = tfmdata.shared.rawdata.metadata.math
- local mathsize = tonumber(mathsize)
- if mathdata then -- we cannot use mathparameters as luatex will complain
- local parameters = tfmdata.parameters
- parameters.scriptpercentage = mathdata.ScriptPercentScaleDown
- parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown
- parameters.mathsize = mathsize
- end
-end
-
-registerotffeature {
- name = "mathsize",
- description = "apply mathsize specified in the font",
- initializers = {
- base = checkmathsize,
- node = checkmathsize,
- }
-}
-
--- helpers
-
-function otf.collectlookups(rawdata,kind,script,language)
- local sequences = rawdata.resources.sequences
- if sequences then
- local featuremap, featurelist = { }, { }
- for s=1,#sequences do
- local sequence = sequences[s]
- local features = sequence.features
- features = features and features[kind]
- features = features and (features[script] or features[default] or features[wildcard])
- features = features and (features[language] or features[default] or features[wildcard])
- if features then
- local subtables = sequence.subtables
- if subtables then
- for s=1,#subtables do
- local ss = subtables[s]
- if not featuremap[ss] then
- featuremap[ss] = true
- featurelist[#featurelist+1] = ss
- end
- end
- end
- end
- end
- if #featurelist > 0 then
- return featuremap, featurelist
- end
- end
- return nil, nil
-end
-
--- readers (a bit messy, this is forced, so I might redo that bit: foo.ttf FOO.ttf foo.TTF FOO.TTF)
-
-local function check_otf(forced,specification,suffix)
- local name = specification.name
- if forced then
- name = specification.forcedname -- messy
- end
- local fullname = findbinfile(name,suffix) or ""
- if fullname == "" then
- fullname = fonts.names.getfilename(name,suffix) or ""
- end
- if fullname ~= "" and not fonts.names.ignoredfile(fullname) then
- specification.filename = fullname
- return read_from_otf(specification)
- end
-end
-
-local function opentypereader(specification,suffix)
- local forced = specification.forced or ""
- if formats[forced] then
- return check_otf(true,specification,forced)
- else
- return check_otf(false,specification,suffix)
- end
-end
-
-readers.opentype = opentypereader -- kind of useless and obsolete
-
-function readers.otf (specification) return opentypereader(specification,"otf") end
-function readers.ttf (specification) return opentypereader(specification,"ttf") end
-function readers.ttc (specification) return opentypereader(specification,"ttf") end
-function readers.dfont(specification) return opentypereader(specification,"ttf") end
-
--- this will be overloaded
-
-function otf.scriptandlanguage(tfmdata,attr)
- local properties = tfmdata.properties
- return properties.script or "dflt", properties.language or "dflt"
-end
-
--- a little bit of abstraction
-
-local function justset(coverage,unicode,replacement)
- coverage[unicode] = replacement
-end
-
-otf.coverup = {
- stepkey = "subtables",
- actions = {
- substitution = justset,
- alternate = justset,
- multiple = justset,
- ligature = justset,
- kern = justset,
- pair = justset,
- chainsubstitution = justset,
- chainposition = justset,
- },
- register = function(coverage,lookuptype,format,feature,n,descriptions,resources)
- local name = formatters["ctx_%s_%s_%s"](feature,lookuptype,n) -- we can have a mix of types
- if lookuptype == "kern" then
- resources.lookuptypes[name] = "position"
- else
- resources.lookuptypes[name] = lookuptype
- end
- for u, c in next, coverage do
- local description = descriptions[u]
- local slookups = description.slookups
- if slookups then
- slookups[name] = c
- else
- description.slookups = { [name] = c }
- end
- end
- return name
- end
-}
-
--- moved from font-oth.lua
-
-local function getgsub(tfmdata,k,kind)
- local description = tfmdata.descriptions[k]
- if description then
- local slookups = description.slookups -- we assume only slookups (we can always extend)
- if slookups then
- local shared = tfmdata.shared
- local rawdata = shared and shared.rawdata
- if rawdata then
- local lookuptypes = rawdata.resources.lookuptypes
- if lookuptypes then
- local properties = tfmdata.properties
- -- we could cache these
- local validlookups, lookuplist = otf.collectlookups(rawdata,kind,properties.script,properties.language)
- if validlookups then
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local found = slookups[lookup]
- if found then
- return found, lookuptypes[lookup]
- end
- end
- end
- end
- end
- end
- end
-end
-
-otf.getgsub = getgsub -- returns value, gsub_kind
-
-function otf.getsubstitution(tfmdata,k,kind,value)
- local found, kind = getgsub(tfmdata,k,kind)
- if not found then
- --
- elseif kind == "substitution" then
- return found
- elseif kind == "alternate" then
- local choice = tonumber(value) or 1 -- no random here (yet)
- return found[choice] or found[1] or k
- end
- return k
-end
-
-otf.getalternate = otf.getsubstitution
-
-function otf.getmultiple(tfmdata,k,kind)
- local found, kind = getgsub(tfmdata,k,kind)
- if found and kind == "multiple" then
- return found
- end
- return { k }
-end
-
-function otf.getkern(tfmdata,left,right,kind)
- local kerns = getgsub(tfmdata,left,kind or "kern",true) -- for now we use getsub
- if kerns then
- local found = kerns[right]
- local kind = type(found)
- if kind == "table" then
- found = found[1][3] -- can be more clever
- elseif kind ~= "number" then
- found = false
- end
- if found then
- return found * tfmdata.parameters.factor
- end
- end
- return 0
-end
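-
--- a small usage sketch (hypothetical font and glyphs; what comes back depends
--- entirely on what the font provides):
---
--- local smallcap = otf.getsubstitution(tfmdata,utf.byte("a"),"smcp",1)
--- local pieces = otf.getmultiple(tfmdata,0x0133,"ccmp")
--- local kern = otf.getkern(tfmdata,utf.byte("A"),utf.byte("V"),"kern") -- in scaled points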
-
-
-registerotfenhancer("prepare tables", enhance_prepare_tables)
-
-registerotfenhancer("prepare glyphs", enhance_prepare_glyphs)
-registerotfenhancer("prepare lookups", enhance_prepare_lookups)
-
-registerotfenhancer("analyze glyphs", enhance_analyze_glyphs)
-registerotfenhancer("analyze math", enhance_analyze_math)
-
-registerotfenhancer("reorganize lookups", enhance_reorganize_lookups)
-registerotfenhancer("reorganize mark classes", enhance_reorganize_mark_classes)
-registerotfenhancer("reorganize anchor classes", enhance_reorganize_anchor_classes)
-
-registerotfenhancer("reorganize glyph kerns", enhance_reorganize_glyph_kerns)
-registerotfenhancer("reorganize glyph lookups", enhance_reorganize_glyph_lookups)
-registerotfenhancer("reorganize glyph anchors", enhance_reorganize_glyph_anchors)
-
-registerotfenhancer("merge kern classes", enhance_merge_kern_classes)
-
-registerotfenhancer("reorganize features", enhance_reorganize_features)
-registerotfenhancer("reorganize subtables", enhance_reorganize_subtables)
-
-registerotfenhancer("check glyphs", enhance_check_glyphs)
-registerotfenhancer("check metadata", enhance_check_metadata)
-
-registerotfenhancer("prepare tounicode", enhance_prepare_tounicode)
-
-registerotfenhancer("check encoding", enhance_check_encoding)
-registerotfenhancer("add duplicates", enhance_add_duplicates)
-
-registerotfenhancer("expand lookups", enhance_expand_lookups)
-
-registerotfenhancer("check extra features", function() end) --placeholder, will be overloaded
-
-registerotfenhancer("cleanup tables", enhance_cleanup_tables)
-
-registerotfenhancer("compact lookups", enhance_compact_lookups)
-registerotfenhancer("purge names", enhance_purge_names)
diff --git a/tex/context/base/mkiv/font-otl.lua b/tex/context/base/mkiv/font-otl.lua
index bbe05304c..ee57d1d8f 100644
--- a/tex/context/base/mkiv/font-otl.lua
+++ b/tex/context/base/mkiv/font-otl.lua
@@ -558,7 +558,7 @@ local function checkmathsize(tfmdata,mathsize)
local parameters = tfmdata.parameters
parameters.scriptpercentage = mathdata.ScriptPercentScaleDown
parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown
- parameters.mathsize = mathsize
+ parameters.mathsize = mathsize -- only when a number !
end
end
diff --git a/tex/context/base/mkiv/font-otn.lua b/tex/context/base/mkiv/font-otn.lua
deleted file mode 100644
index d48021347..000000000
--- a/tex/context/base/mkiv/font-otn.lua
+++ /dev/null
@@ -1,3927 +0,0 @@
-if not modules then modules = { } end modules ['font-otn'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- this is a context version which can contain experimental code, but when we
--- have serious patches we also need to change the other two font-otn files
-
--- at some point i might decide to convert the whole list into a table and then
--- run over that instead (but it has some drawbacks as we also need to deal with
--- attributes and such so we need to keep a lot of track - which is why i rejected
--- that method - although it has become a bit easier in the meantime so it might
--- become an alternative (by that time i probably have gone completely lua) .. the
--- usual chicken-egg issues ... maybe mkix as it's no real tex any more then
-
--- preprocessors = { "nodes" }
-
--- anchor class : mark, mkmk, curs, mklg (todo)
--- anchor type : mark, basechar, baselig, basemark, centry, cexit, max (todo)
-
--- this is still somewhat preliminary and it will get better in due time;
--- much functionality could only be implemented thanks to the husayni font
--- of Idris Samawi Hamid, to whom we dedicate this module.
-
--- in retrospect it always looks easy but believe it or not, it took a lot
--- of work to get proper open type support done: buggy fonts, fuzzy specs,
--- special made testfonts, many skype sessions between taco, idris and me,
--- torture tests etc etc ... unfortunately the code does not show how much
--- time it took ...
-
--- todo:
---
--- extension infrastructure (for usage out of context)
--- sorting features according to vendors/renderers
--- alternative loop quitters
--- check cursive and r2l
--- find out where ignore-mark-classes went
--- default features (per language, script)
--- handle positions (we need example fonts)
--- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
--- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
--- remove some optimizations (when I have a faster machine)
---
--- beware:
---
--- we do some disc juggling where we need to keep in mind that the
--- pre, post and replace fields can have prev pointers to a nesting
--- node ... i wonder if that is still needed
---
--- not possible:
---
--- \discretionary {alpha-} {betagammadelta}
--- {\discretionary {alphabeta-} {gammadelta}
--- {\discretionary {alphabetagamma-} {delta}
--- {alphabetagammadelta}}}
-
---[[ldx--
-<p>This module is a bit more split up than I'd like but since we also want to test
-with plain <l n='tex'/> it has to be so. This module is part of <l n='context'/>
-and discussion about improvements and functionality mostly happens on the
-<l n='context'/> mailing list.</p>
-
-<p>The specification of OpenType is kind of vague. Apart from the lack of proper
-free specifications there's also the problem that Microsoft and Adobe
-may have their own interpretation of how and in what order to apply features.
-In general the Microsoft website has more detailed specifications and is a
-better reference. There is also some information in the FontForge help files.</p>
-
-<p>Because so much is possible, fonts might contain bugs and/or be made to
-work with certain renderers. These may evolve over time which may have the side
-effect that suddenly fonts behave differently.</p>
-
-<p>After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
-implementation. Of course all errors are mine and of course the code can be
-improved. There are quite some optimizations going on here and processing speed
-is currently acceptable. Not all functions are implemented yet, often because I
-lack the fonts for testing. Many scripts are not yet supported either, but I will
-look into them as soon as <l n='context'/> users ask for it.</p>
-
-<p>The specification leaves room for interpretation. In case of doubt the microsoft
-implementation is the reference as it is the most complete one. As they deal with
-lots of scripts and fonts, Kai and Ivo did a lot of testing of the generic code and
-their suggestions help improve the code. I'm aware that not all border cases can be
-taken care of, unless we accept excessive runtime, and even then the interference
-with other mechanisms (like hyphenation) is not trivial.</p>
-
-<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
-relationship with unicode at all, apart from the fact that a font might cover certain
-ranges of characters. One character can have multiple shapes. However, at the
-<l n='tex'/> end we use unicode, so all extra glyphs are mapped into a private
-space. This is needed because we need to access them and <l n='tex'/> has to include
-them in the output eventually.</p>
-
-<p>The raw table as it comes from <l n='fontforge'/> gets reorganized to fit our needs.
-In <l n='context'/> that table is packed (similar tables are shared) and cached on disk
-so that successive runs can use the optimized table (after loading the table is
-unpacked). The flattening code used later is a prelude to an even more compact table
-format (and as such it keeps evolving).</p>
-
-<p>This module is sparsely documented because it is a moving target. The table format
-of the reader changes and we experiment a lot with different methods for supporting
-features.</p>
-
-<p>As with the <l n='afm'/> code, we may decide to store more information in the
-<l n='otf'/> table.</p>
-
-<p>Incrementing the version number will force a re-cache. We jump the number by one
-when there's a fix in the <l n='fontforge'/> library or <l n='lua'/> code that
-results in different tables.</p>
---ldx]]--
-
--- action handler chainproc
---
--- gsub_single ok ok
--- gsub_multiple ok ok
--- gsub_alternate ok ok
--- gsub_ligature ok ok
--- gsub_context ok --
--- gsub_contextchain ok --
--- gsub_reversecontextchain ok --
--- chainsub -- ok
--- reversesub -- ok
--- gpos_mark2base ok ok
--- gpos_mark2ligature ok ok
--- gpos_mark2mark ok ok
--- gpos_cursive ok untested
--- gpos_single ok ok
--- gpos_pair ok ok
--- gpos_context ok --
--- gpos_contextchain ok --
---
--- todo: contextpos
---
--- actions:
---
--- handler : actions triggered by lookup
--- chainproc : actions triggered by contextual lookup
--- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
---
--- remark: the 'not implemented yet' variants will be done when we have fonts that use them
-
--- We used to have independent hashes for lookups but as the tags are unique
--- we now use only one hash. If needed we can have multiple again but in that
--- case I will probably prefix (i.e. rename) the lookups in the cached font file.
-
--- Todo: make plugin feature that operates on char/glyphnode arrays
-
-local type, next, tonumber = type, next, tonumber
-local random = math.random
-local formatters = string.formatters
-
-local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
-
-local registertracker = trackers.register
-local registerdirective = directives.register
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
-local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
-local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
-local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
-local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
-local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
-local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
-local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
-local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
-local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
-local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
-local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
-
-local trace_kernruns = false registertracker("otf.kernruns", function(v) trace_kernruns = v end)
-local trace_discruns = false registertracker("otf.discruns", function(v) trace_discruns = v end)
-local trace_compruns = false registertracker("otf.compruns", function(v) trace_compruns = v end)
-
-local quit_on_no_replacement = true -- maybe per font
-local zwnjruns = true
-
-registerdirective("otf.zwnjruns", function(v) zwnjruns = v end)
-registerdirective("otf.chain.quitonnoreplacement",function(value) quit_on_no_replacement = value end)
-
-local report_direct = logs.reporter("fonts","otf direct")
-local report_subchain = logs.reporter("fonts","otf subchain")
-local report_chain = logs.reporter("fonts","otf chain")
-local report_process = logs.reporter("fonts","otf process")
-local report_prepare = logs.reporter("fonts","otf prepare")
-local report_run = logs.reporter("fonts","otf run")
-
-registertracker("otf.substitutions", "otf.singles","otf.multiples","otf.alternatives","otf.ligatures")
-registertracker("otf.positions", "otf.marks","otf.kerns","otf.cursive")
-registertracker("otf.actions", "otf.substitutions","otf.positions")
-registertracker("otf.sample", "otf.steps","otf.substitutions","otf.positions","otf.analyzing")
-
-registertracker("otf.chain.verbose", function(v) otf.setcontextchain(v and "verbose") end)
-registertracker("otf.chain.normal", function(v) otf.setcontextchain(v and "normal") end)
-
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local setnext = nuts.setnext
-local getprev = nuts.getprev
-local setprev = nuts.setprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getprop = nuts.getprop
-local setprop = nuts.setprop
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local setsubtype = nuts.setsubtype
-local getchar = nuts.getchar
-local setchar = nuts.setchar
-
-local insert_node_after = nuts.insert_after
-local copy_node = nuts.copy
-local copy_node_list = nuts.copy_list
-local find_node_tail = nuts.tail
-local flush_node_list = nuts.flush_list
-local flush_node = nuts.flush_node
-local end_of_math = nuts.end_of_math
-local traverse_nodes = nuts.traverse
-local traverse_id = nuts.traverse_id
-
-local setmetatableindex = table.setmetatableindex
-
-local zwnj = 0x200C
-local zwj = 0x200D
-local wildcard = "*"
-local default = "dflt"
-
-local nodecodes = nodes.nodecodes
-local glyphcodes = nodes.glyphcodes
-local disccodes = nodes.disccodes
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local math_code = nodecodes.math
-local dir_code = nodecodes.dir
-local localpar_code = nodecodes.localpar
-
-local discretionary_code = disccodes.discretionary
-local ligature_code = glyphcodes.ligature
-
-local privateattribute = attributes.private
-
--- Something is messed up: we have two mark / ligature indices, one at the injection
--- end and one here ... this is based on KE's patches but there is something fishy
--- there as I'm pretty sure that for husayni we need some connection (as it's much
--- more complex than an average font) but I need proper examples of all cases, not
--- of only some.
-
-local a_state = privateattribute('state')
-local a_cursbase = privateattribute('cursbase') -- to be checked, probably can go
-
-local injections = nodes.injections
-local setmark = injections.setmark
-local setcursive = injections.setcursive
-local setkern = injections.setkern
-local setpair = injections.setpair
-local resetinjection = injections.reset
-local copyinjection = injections.copy
-local setligaindex = injections.setligaindex
-local getligaindex = injections.getligaindex
-
-local cursonce = true
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-
-local otffeatures = fonts.constructors.features.otf
-local registerotffeature = otffeatures.register
-
-local onetimemessage = fonts.loggers.onetimemessage or function() end
-
-local getrandom = utilities and utilities.randomizer and utilities.randomizer.get
-
-otf.defaultnodealternate = "none" -- first last
-
--- we share some vars here, after all, we have no nested lookups and less code
-
-local tfmdata = false
-local characters = false
-local descriptions = false
-local resources = false
-local marks = false
-local currentfont = false
-local lookuptable = false
-local anchorlookups = false
-local lookuptypes = false
-local lookuptags = false
-local handlers = { }
-local rlmode = 0
-local featurevalue = false
-
-local sweephead = { }
-local sweepnode = nil
-local sweepprev = nil
-local sweepnext = nil
-
-local notmatchpre = { }
-local notmatchpost = { }
-local notmatchreplace = { }
-
--- we use this for special testing and documentation
-
-local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
-local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
-local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_direct(...)
-end
-
-local function logwarning(...)
- report_direct(...)
-end
-
-local f_unicode = formatters["%U"]
-local f_uniname = formatters["%U (%s)"]
-local f_unilist = formatters["% t (% t)"]
-
-local function gref(n) -- currently the same as in font-otb
- if type(n) == "number" then
- local description = descriptions[n]
- local name = description and description.name
- if name then
- return f_uniname(n,name)
- else
- return f_unicode(n)
- end
- elseif n then
- local num, nam = { }, { }
- for i=1,#n do
- local ni = n[i]
- if tonumber(ni) then -- later we will start at 2
- local di = descriptions[ni]
- num[i] = f_unicode(ni)
- nam[i] = di and di.name or "-"
- end
- end
- return f_unilist(num,nam)
- else
- return "<error in node mode tracing>"
- end
-end
-
-local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_
- if index then
- return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index)
- elseif lookupname then
- return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname])
- elseif chainlookupname then
- return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname])
- elseif chainname then
- return formatters["feature %a, chain %a"](kind,lookuptags[chainname])
- else
- return formatters["feature %a"](kind)
- end
-end
-
-local function pref(kind,lookupname)
- return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname])
-end
-
--- We can assume that languages that use marks are not hyphenated. We can also assume
--- that at most one discretionary is present.
-
--- We do need components in funny kerning mode but maybe I can better reconstruct them
--- as we do have the font components info available; removing components makes the
--- previous code much simpler. Also, later on copying and freeing becomes easier.
--- However, for arabic we need to keep them around for the sake of mark placement
--- and indices.
-
-local function copy_glyph(g) -- next and prev are untouched !
- local components = getfield(g,"components")
- if components then
- setfield(g,"components",nil)
- local n = copy_node(g)
- copyinjection(n,g) -- we need to preserve the lig indices
- setfield(g,"components",components)
- return n
- else
- local n = copy_node(g)
- copyinjection(n,g) -- we need to preserve the lig indices
- return n
- end
-end
-
-local function flattendisk(head,disc)
- local replace = getfield(disc,"replace")
- setfield(disc,"replace",nil)
- flush_node(disc)
- if head == disc then
- local next = getnext(disc)
- if replace then
- if next then
- local tail = find_node_tail(replace)
- setnext(tail,next)
- setprev(next,tail)
- end
- return replace, replace
- elseif next then
- return next, next
- else
- return -- maybe warning
- end
- else
- local next = getnext(disc)
- local prev = getprev(disc)
- if replace then
- local tail = find_node_tail(replace)
- if next then
- setnext(tail,next)
- setprev(next,tail)
- end
- setnext(prev,replace)
- setprev(replace,prev)
- return head, replace
- else
- if next then
- setprev(next,prev)
- end
- setnext(prev,next)
- return head, next
- end
- end
-end
-
-local function appenddisc(disc,list)
- local post = getfield(disc,"post")
- local replace = getfield(disc,"replace")
- local phead = list
- local rhead = copy_node_list(list)
- local ptail = find_node_tail(post)
- local rtail = find_node_tail(replace)
- if post then
- setnext(ptail,phead)
- setprev(phead,ptail)
- else
- setfield(disc,"post",phead)
- end
- if replace then
- setnext(rtail,rhead)
- setprev(rhead,rtail)
- else
- setfield(disc,"replace",rhead)
- end
-end
-
--- start is a mark and we need to keep that one
-
-local function markstoligature(kind,lookupname,head,start,stop,char)
- if start == stop and getchar(start) == char then
- return head, start
- else
- local prev = getprev(start)
- local next = getnext(stop)
- setprev(start,nil)
- setnext(stop,nil)
- local base = copy_glyph(start)
- if head == start then
- head = base
- end
- resetinjection(base)
- setchar(base,char)
- setsubtype(base,ligature_code)
- setfield(base,"components",start)
- if prev then
- setnext(prev,base)
- end
- if next then
- setprev(next,base)
- end
- setnext(base,next)
- setprev(base,prev)
- return head, base
- end
-end
-
--- The next code is somewhat complicated by the fact that some fonts can have ligatures made
--- from ligatures that themselves have marks. This was identified by Kai in for instance
--- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes
-KAF LAM-ALEF with a SHADDA on the first and a FATHA on the second component. In a next
--- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the
--- third component.
-
-local function getcomponentindex(start) -- we could store this offset in the glyph (nofcomponents)
- if getid(start) ~= glyph_code then -- and then get rid of all components
- return 0
- elseif getsubtype(start) == ligature_code then
- local i = 0
- local components = getfield(start,"components")
- while components do
- i = i + getcomponentindex(components)
- components = getnext(components)
- end
- return i
- elseif not marks[getchar(start)] then
- return 1
- else
- return 0
- end
-end
-
-local a_noligature = attributes.private("noligature")
-
-local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
- if getattr(start,a_noligature) == 1 then
- -- so we can do: e\noligature{ff}e e\noligature{f}fie (we only look at the first)
- return head, start
- end
- if start == stop and getchar(start) == char then
- resetinjection(start)
- setchar(start,char)
- return head, start
- end
- -- needs testing (side effects):
- local components = getfield(start,"components")
- if components then
- -- we get a double free .. needs checking
- -- flush_node_list(components)
- end
- --
- local prev = getprev(start)
- local next = getnext(stop)
- local comp = start
- setprev(start,nil)
- setnext(stop,nil)
- local base = copy_glyph(start)
- if start == head then
- head = base
- end
- resetinjection(base)
- setchar(base,char)
- setsubtype(base,ligature_code)
- setfield(base,"components",comp) -- start can have components ... do we need to flush?
- if prev then
- setnext(prev,base)
- end
- if next then
- setprev(next,base)
- end
- setprev(base,prev)
- setnext(base,next)
- if not discfound then
- local deletemarks = markflag ~= "mark"
- local components = start
- local baseindex = 0
- local componentindex = 0
- local head = base
- local current = base
- -- first we loop over the glyphs in start .. stop
- while start do
- local char = getchar(start)
- if not marks[char] then
- baseindex = baseindex + componentindex
- componentindex = getcomponentindex(start)
- elseif not deletemarks then -- quite fishy
- setligaindex(start,baseindex + getligaindex(start,componentindex))
- if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
- end
- local n = copy_node(start)
- copyinjection(n,start)
- head, current = insert_node_after(head,current,n) -- unlikely that mark has components
- elseif trace_marks then
- logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
- end
- start = getnext(start)
- end
- -- we can have one accent as part of a lookup and another following
- -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari were added)
- local start = getnext(current)
- while start and getid(start) == glyph_code do
- local char = getchar(start)
- if marks[char] then
- setligaindex(start,baseindex + getligaindex(start,componentindex))
- if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
- end
- else
- break
- end
- start = getnext(start)
- end
- else
- -- discfound ... forget about marks .. probably no scripts that hyphenate and have marks
- local discprev = getprev(discfound)
- local discnext = getnext(discfound)
- if discprev and discnext then
- -- we assume normalization in context, and don't care about generic ... especially
- -- \- can give problems as there we can have a negative char but that won't match
- -- anyway
- local pre = getfield(discfound,"pre")
- local post = getfield(discfound,"post")
- local replace = getfield(discfound,"replace")
- if not replace then -- todo: signal simple hyphen
- local prev = getprev(base)
- local copied = copy_node_list(comp)
- setprev(discnext,nil) -- also blocks funny assignments
- setnext(discprev,nil) -- also blocks funny assignments
- if pre then
- setnext(discprev,pre)
- setprev(pre,discprev)
- end
- pre = comp
- if post then
- local tail = find_node_tail(post)
- setnext(tail,discnext)
- setprev(discnext,tail)
- setprev(post,nil)
- else
- post = discnext
- end
- setnext(prev,discfound)
- setprev(discfound,prev)
- setnext(discfound,next)
- setprev(next,discfound)
- setnext(base,nil)
- setprev(base,nil)
- setfield(base,"components",copied)
- setfield(discfound,"pre",pre)
- setfield(discfound,"post",post)
- setfield(discfound,"replace",base)
- setsubtype(discfound,discretionary_code)
- base = prev -- restart
- end
- end
- end
- return head, base
-end
-
-local function multiple_glyphs(head,start,multiple,ignoremarks)
- local nofmultiples = #multiple
- if nofmultiples > 0 then
- resetinjection(start)
- setchar(start,multiple[1])
- if nofmultiples > 1 then
- local sn = getnext(start)
- for k=2,nofmultiples do -- todo: use insert_node
--- untested:
---
--- while ignoremarks and marks[getchar(sn)] do
--- sn = getnext(sn)
--- end
- local n = copy_node(start) -- ignore components
- resetinjection(n)
- setchar(n,multiple[k])
- setprev(n,start)
- setnext(n,sn)
- if sn then
- setprev(sn,n)
- end
- setnext(start,n)
- start = n
- end
- end
- return head, start, true
- else
- if trace_multiples then
- logprocess("no multiple for %s",gref(getchar(start)))
- end
- return head, start, false
- end
-end
-
-local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
- local n = #alternatives
- if value == "random" then
- local r = getrandom and getrandom("glyph",1,n) or random(1,n)
- return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r)
- elseif value == "first" then
- return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1)
- elseif value == "last" then
- return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n)
- else
- value = tonumber(value)
- if type(value) ~= "number" then
- return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
- elseif value > n then
- local defaultalt = otf.defaultnodealternate
- if defaultalt == "first" then
- return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
- elseif defaultalt == "last" then
- return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
- else
- return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
- end
- elseif value == 0 then
- return getchar(start), trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
- elseif value < 1 then
- return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
- else
- return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value)
- end
- end
-end
-
--- handlers
-
-function handlers.gsub_single(head,start,kind,lookupname,replacement)
- if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
- end
- resetinjection(start)
- setchar(start,replacement)
- return head, start, true
-end
-
-function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
- end
- resetinjection(start)
- setchar(start,choice)
- else
- if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
- end
- end
- return head, start, true
-end
-
-function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
- if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
- end
- return multiple_glyphs(head,start,multiple,sequence.flags[1])
-end
-
-function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
- local s, stop = getnext(start), nil
- local startchar = getchar(start)
- if marks[startchar] then
- while s do
- local id = getid(s)
- if id == glyph_code and getfont(s) == currentfont and getsubtype(s)<256 then
- local lg = ligature[getchar(s)]
- if lg then
- stop = s
- ligature = lg
- s = getnext(s)
- else
- break
- end
- else
- break
- end
- end
- if stop then
- local lig = ligature.ligature
- if lig then
- if trace_ligatures then
- local stopchar = getchar(stop)
- head, start = markstoligature(kind,lookupname,head,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
- else
- head, start = markstoligature(kind,lookupname,head,start,stop,lig)
- end
- return head, start, true, false
- else
- -- ok, goto next lookup
- end
- end
- else
- local skipmark = sequence.flags[1]
- local discfound = false
- local lastdisc = nil
- while s do
- local id = getid(s)
- if id == glyph_code and getsubtype(s)<256 then -- not needed
- if getfont(s) == currentfont then -- also not needed only when mark
- local char = getchar(s)
- if skipmark and marks[char] then
- s = getnext(s)
- else -- ligature is a tree
- local lg = ligature[char] -- can there be multiple in a row? maybe in a bad font
- if lg then
- if not discfound and lastdisc then
- discfound = lastdisc
- lastdisc = nil
- end
- stop = s -- needed for fake so outside then
- ligature = lg
- s = getnext(s)
- else
- break
- end
- end
- else
- break
- end
- elseif id == disc_code then
- lastdisc = s
- s = getnext(s)
- else
- break
- end
- end
- local lig = ligature.ligature -- can't we get rid of this .ligature?
- if lig then
- if stop then
- if trace_ligatures then
- local stopchar = getchar(stop)
- head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
- else
- head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- end
- else
- -- weird but happens (in some arabic font)
- resetinjection(start)
- setchar(start,lig)
- if trace_ligatures then
- logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
- end
- end
- return head, start, true, discfound
- else
- -- weird but happens, pseudo ligatures ... just the components
- end
- end
- return head, start, false, discfound
-end
-
-function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence,lookuphash,i,injection)
- local startchar = getchar(start)
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,injection) -- ,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
- end
- return head, start, false
-end
-
-function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence,lookuphash,i,injection)
- -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
- -- todo: kerns in components of ligatures
- local snext = getnext(start)
- if not snext then
- return head, start, false
- else
- local prev = start
- local done = false
- local factor = tfmdata.parameters.factor
- local lookuptype = lookuptypes[lookupname]
- while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
- local nextchar = getchar(snext)
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = getnext(snext)
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then -- probably not needed
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,injection) -- characters[startchar])
- if trace_kerns then
- local startchar = getchar(start)
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,injection) -- characters[nextchar])
- if trace_kerns then
- local startchar = getchar(start)
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else -- wrong ... position has different entries
- report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
- -- local a, b = krn[2], krn[6]
- -- if a and a ~= 0 then
- -- local k = setkern(snext,factor,rlmode,a)
- -- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
- -- end
- -- end
- -- if b and b ~= 0 then
- -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
- -- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn,injection)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar)) -- prev?
- end
- done = true
- end
- break
- end
- end
- return head, start, done
- end
-end
-
---[[ldx--
-<p>We get hits on a mark, but we're not sure if it has to be applied, so
-we need to explicitly test for basechar, baselig and basemark entries.</p>
---ldx]]--
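-
--- An illustrative sketch (editor's addition, not part of the original code) of the
--- data these handlers walk: per-glyph anchor classes in the descriptions table,
--- with hypothetical names and coordinates:
---
--- descriptions[basechar].anchors = {
---     basechar = { ["Anchor-1"] = { 520, 680 } },                   -- used by mark2base
---     baselig  = { ["Anchor-2"] = { { 250, 680 }, { 730, 680 } } }, -- per ligature component (mark2ligature)
---     basemark = { ["Anchor-3"] = { 300, 900 } },                   -- used by mark2mark
--- }
---
--- The mark glyph supplies the matching markanchors entry, and setmark combines the
--- two to compute the (dx,dy) shift reported in the tracing below.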
-
-function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
- local markchar = getchar(start)
- if marks[markchar] then
- local base = getprev(start) -- [glyph] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
- if marks[basechar] then
- while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- end
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
- -- check chainpos variant
- local markchar = getchar(start)
- if marks[markchar] then
- local base = getprev(start) -- [glyph] [optional marks] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
- if marks[basechar] then
- while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local index = getligaindex(start)
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor, ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) -- index
- if trace_marks then
- logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
- pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return head, start, true
- else
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
- end
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
- local markchar = getchar(start)
- if marks[markchar] then
- local base = getprev(start) -- [glyph] [basemark] [start=mark]
- local slc = getligaindex(start)
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = getligaindex(base)
- if blc and blc ~= slc then
- base = getprev(base)
- else
- break
- end
- end
- end
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
- local basechar = getchar(base)
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar],true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and getprop(start,a_cursbase)
- if not alreadydone then
- local done = false
- local startchar = getchar(start)
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = getnext(start)
- while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
- local nextchar = getchar(nxt)
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = getnext(nxt)
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
- end
- break
- end
- end
- end
- return head, start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
- end
- return head, start, false
- end
-end
-
---[[ldx--
-<p>I will implement multiple chain replacements once I run into a font that uses
-it. It's not that complex to handle.</p>
---ldx]]--
-
-local chainprocs = { }
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_subchain(...)
-end
-
-local logwarning = report_subchain
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_chain(...)
-end
-
-local logwarning = report_chain
-
--- We could share functions but that would lead to extra function calls with many
--- arguments, redundant tests and confusing messages.
-
-function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
- logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return head, start, false
-end
-
--- The reversesub is a special case, which is why we need to store the replacements
--- in a bit weird way. There is no lookup and the replacement comes from the lookup
--- itself. It is meant mostly for dealing with Urdu.
-
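--- A minimal sketch (editor's addition, not from the original code): the replacements
--- table passed in below is just a char -> char map that travels with the chain rule
--- itself, so a hypothetical entry could look like this:
---
--- local replacements = {
---     [0x0646] = 0x06BA, -- hypothetical: one character code mapped onto another
--- }
---
--- chainprocs.reversesub then simply indexes it with getchar(start).
-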
-function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char = getchar(start)
- local replacement = replacements[char]
- if replacement then
- if trace_singles then
- logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
- end
- resetinjection(start)
- setchar(start,replacement)
- return head, start, true
- else
- return head, start, false
- end
-end
-
---[[ldx--
-<p>This chain stuff is somewhat tricky since we can have a sequence of actions to be
-applied: single, alternate, multiple or ligature where ligature can be an invalid
-one in the sense that it will replace multiple by one but not necessarily one that
-looks like the combination (i.e. it is the counterpart of multiple then). For
-example, the following is valid:</p>
-
-<typing>
-<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
-</typing>
---ldx]]--
-
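--- A worked illustration (editor's addition, not in the original): applying those
--- three chained actions one after another to the matched slice:
---
---   a b c d e       -- matched part of the context
---   A b c d e       -- after [single   a -> A  ]
---   A B C D c d e   -- after [multiple b -> BCD]
---   A B C D E       -- after [ligature cde -> E]
-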
---[[ldx--
-<p>Here we replace start by a single variant.</p>
---ldx]]--
-
-function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- -- todo: marks ?
- local current = start
- local subtables = currentlookup.subtables
- if #subtables > 1 then
- logwarning("todo: check if we need to loop over the replacements: % t",subtables)
- end
- while current do
- if getid(current) == glyph_code then
- local currentchar = getchar(current)
- local lookupname = subtables[1] -- only 1
- local replacement = lookuphash[lookupname]
- if not replacement then
- if trace_bugs then
- logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- replacement = replacement[currentchar]
- if not replacement or replacement == "" then
- if trace_bugs then
- logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
- end
- else
- if trace_singles then
- logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
- end
- resetinjection(current)
- setchar(current,replacement)
- end
- end
- return head, start, true
- elseif current == stop then
- break
- else
- current = getnext(current)
- end
- end
- return head, start, false
-end
-
---[[ldx--
-<p>Here we replace start by a sequence of new glyphs.</p>
---ldx]]--
-
-function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- -- local head, n = delete_till_stop(head,start,stop)
- local startchar = getchar(start)
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local replacements = lookuphash[lookupname]
- if not replacements then
- if trace_bugs then
- logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- replacements = replacements[startchar]
- if not replacements or replacements == "" then
- if trace_bugs then
- logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
- end
- else
- if trace_multiples then
- logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
- end
- return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
- end
- end
- return head, start, false
-end
-
---[[ldx--
-<p>Here we replace start by new glyph. First we delete the rest of the match.</p>
---ldx]]--
-
--- char_1 mark_1 -> char_x mark_1 (ignore marks)
--- char_1 mark_1 -> char_x
-
--- to be checked: do we always have just one glyph?
--- we can also have alternates for marks
--- marks come last anyway
--- are there cases where we need to delete the mark
-
-function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local current = start
- local subtables = currentlookup.subtables
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- while current do
- if getid(current) == glyph_code then -- is this check needed?
- local currentchar = getchar(current)
- local lookupname = subtables[1]
- local alternatives = lookuphash[lookupname]
- if not alternatives then
- if trace_bugs then
- logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- alternatives = alternatives[currentchar]
- if alternatives then
- local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),choice,gref(choice),comment)
- end
- resetinjection(start)
- setchar(start,choice)
- else
- if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(currentchar),comment)
- end
- end
- elseif trace_bugs then
- logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
- end
- end
- return head, start, true
- elseif current == stop then
- break
- else
- current = getnext(current)
- end
- end
- return head, start, false
-end
-
---[[ldx--
-<p>When we replace ligatures we use a helper that handles the marks. I might change
-this function (move code inline and handle the marks by a separate function). We
-assume rather stupid ligatures (no complex disc nodes).</p>
---ldx]]--
-
-function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar = getchar(start)
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local ligatures = lookuphash[lookupname]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- ligatures = ligatures[startchar]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- end
- else
- local s = getnext(start)
- local discfound = false
- local last = stop
- local nofreplacements = 1
- local skipmark = currentlookup.flags[1]
- while s do
- local id = getid(s)
- if id == disc_code then
- if not discfound then
- discfound = s
- end
- if s == stop then
- break -- okay? or before the disc
- else
- s = getnext(s)
- end
- else
- local schar = getchar(s)
- if skipmark and marks[schar] then -- marks
- s = getnext(s)
- else
- local lg = ligatures[schar]
- if lg then
- ligatures, last, nofreplacements = lg, s, nofreplacements + 1
- if s == stop then
- break
- else
- s = getnext(s)
- end
- else
- break
- end
- end
- end
- end
- local l2 = ligatures.ligature
- if l2 then
- if chainindex then
- stop = last
- end
- if trace_ligatures then
- if start == stop then
- logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
- else
- logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
- end
- end
- head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
- return head, start, true, nofreplacements, discfound
- elseif trace_bugs then
- if start == stop then
- logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
- end
- end
- end
- end
- return head, start, false, 0, false
-end
-
-function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- -- untested .. needs checking for the new model
- local startchar = getchar(start)
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar] -- needed ?
- if kerns then
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns) -- ,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
- end
- end
- end
- return head, start, false
-end
-
-function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- local snext = getnext(start)
- if snext then
- local startchar = getchar(start)
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar]
- if kerns then
- local lookuptype = lookuptypes[lookupname]
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
- local nextchar = getchar(snext)
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = getnext(snext)
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = getchar(start)
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a) -- ,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = getchar(start)
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b) -- ,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else
- report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
- -- local a, b = krn[2], krn[6]
- -- if a and a ~= 0 then
- -- local k = setkern(snext,factor,rlmode,a)
- -- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
- -- end
- -- end
- -- if b and b ~= 0 then
- -- logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
- -- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return head, start, done
- end
- end
- end
- return head, start, false
-end
-
-function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = getchar(start)
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = getprev(start) -- [glyph] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
- if marks[basechar] then
- while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = getchar(start)
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = getprev(start) -- [glyph] [optional marks] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
- if marks[basechar] then
- while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
- end
- return head, start, false
- end
- end
- end
- -- todo: like marks a ligatures hash
- local index = getligaindex(start)
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return head, start, true
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = getchar(start)
- if marks[markchar] then
- -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = getprev(start) -- [glyph] [basemark] [start=mark]
- local slc = getligaindex(start)
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = getligaindex(base)
- if blc and blc ~= slc then
- base = getprev(base)
- else
- break
- end
- end
- end
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
- local basechar = getchar(base)
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar],true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and getprop(start,a_cursbase)
- if not alreadydone then
- local startchar = getchar(start)
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local exitanchors = lookuphash[lookupname]
- if exitanchors then
- exitanchors = exitanchors[startchar]
- end
- if exitanchors then
- local done = false
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = getnext(start)
- while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
- local nextchar = getchar(nxt)
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = getnext(nxt)
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
- end
- break
- end
- end
- end
- return head, start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
- end
- return head, start, false
- end
- end
- return head, start, false
-end
-
--- what pointer to return, spec says stop
--- to be discussed ... is bidi changer a space?
--- elseif char == zwnj and sequence[n][32] then -- brrr
-
--- somehow l or f is global
--- we don't need to pass the currentcontext, saves a bit
--- make a slow variant that can be activated but with more tracing
-
-local function show_skip(kind,chainname,char,ck,class)
- if ck[9] then
- logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
- else
- logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
- end
-end
-
--- A previous version had disc collapsing code in the (single sub) handler plus some
--- checking in the main loop, but that left the pre/post sequences undone. The best
--- solution is to add some checking there and backtrack when a replace/post matches,
--- but it takes a bit of work to figure out an efficient way (this is what the sweep*
--- names refer to). I might look into that variant one day again as it can replace
--- some other code too. In that approach we can have a special version for gsub and gpos
--- which gains some speed. This method does the test and passes info to the handlers
--- (sweepnode, sweepmode, sweepprev, sweepnext, etc). Here collapsing is handled in the
--- main loop, which also makes code elsewhere simpler (i.e. no need for the other special
--- runners and disc code in ligature building). I also experimented with pushing preceding
--- glyph sequences into the replace/pre fields beforehand, which saves checking afterwards,
--- but at the cost of duplicate glyphs (memory), and it's too much overhead (runtime).
---
--- In the meantime Kai had moved the code from the single chain into a more general handler
--- and this one (renamed to chaindisk) is used now. I optimized the code a bit and brought
--- it in sync with the other code. Hopefully I didn't introduce errors. Note: this somewhat
--- complex approach is meant for fonts that implement (for instance) ligatures by character
--- replacement, which to some extent is not that suitable for hyphenation. I also use some
--- helpers. This method passes some states but reparses the list. There is room for a bit of
--- speedup but that will be done in the context version. (In fact a partial rewrite of all
--- code can bring some more efficiency.)
---
--- I didn't test it with extremes, but successive disc nodes can still give issues; in
--- order to handle that we need more complex code which also slows down even more. The main
--- loop variant could deal with that: test, collapse, backtrack.
-
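--- For reference (editor's addition, not part of the original code): a discretionary
--- node carries three sublists that the code below keeps taking apart and re-gluing;
--- they are accessed with getfield as elsewhere in this file, e.g.
---
--- local pre     = getfield(d,"pre")     -- typeset at the end of the line when the break is taken
--- local post    = getfield(d,"post")    -- typeset at the start of the next line in that case
--- local replace = getfield(d,"replace") -- typeset when the line is not broken at this disc
---
--- where d is some disc node; chaindisk decides to which of these sublists a context
--- match has to be (re)applied.
-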
-local function chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,chainindex,sequence,chainproc)
-
- if not start then
- return head, start, false
- end
-
- local startishead = start == head
- local seq = ck[3]
- local f = ck[4]
- local l = ck[5]
- local s = #seq
- local done = false
- local sweepnode = sweepnode
- local sweeptype = sweeptype
- local sweepoverflow = false
- local checkdisc = getprev(head) -- hm bad name head
- local keepdisc = not sweepnode
- local lookaheaddisc = nil
- local backtrackdisc = nil
- local current = start
- local last = start
- local prev = getprev(start)
-
- -- fishy: so we can overflow and then go on in the sweep?
-
- local i = f
- while i <= l do
- local id = getid(current)
- if id == glyph_code then
- i = i + 1
- last = current
- current = getnext(current)
- elseif id == disc_code then
- if keepdisc then
- keepdisc = false
- if notmatchpre[current] ~= notmatchreplace[current] then
- lookaheaddisc = current
- end
- local replace = getfield(current,"replace")
- while replace and i <= l do
- if getid(replace) == glyph_code then
- i = i + 1
- end
- replace = getnext(replace)
- end
- last = current
- current = getnext(current)
- else
- head, current = flattendisk(head,current)
- end
- else
- last = current
- current = getnext(current)
- end
- if current then
- -- go on
- elseif sweepoverflow then
- -- we are already following up on the sweepnode
- break
- elseif sweeptype == "post" or sweeptype == "replace" then
- current = getnext(sweepnode)
- if current then
- sweeptype = nil
- sweepoverflow = true
- else
- break
- end
- else
- break -- added
- end
- end
-
- if sweepoverflow then
- local prev = current and getprev(current)
- if not current or prev ~= sweepnode then
- local head = getnext(sweepnode)
- local tail = nil
- if prev then
- tail = prev
- setprev(current,sweepnode)
- else
- tail = find_node_tail(head)
- end
- setnext(sweepnode,current)
- setprev(head,nil)
- setnext(tail,nil)
- appenddisc(sweepnode,head)
- end
- end
-
- if l < s then
- local i = l
- local t = sweeptype == "post" or sweeptype == "replace"
- while current and i < s do
- local id = getid(current)
- if id == glyph_code then
- i = i + 1
- current = getnext(current)
- elseif id == disc_code then
- if keepdisc then
- keepdisc = false
- if notmatchpre[current] ~= notmatchreplace[current] then
- lookaheaddisc = current
- end
- local replace = getfield(current,"replace")
- while replace and i < s do
- if getid(replace) == glyph_code then
- i = i + 1
- end
- replace = getnext(replace)
- end
- current = getnext(current)
- elseif notmatchpre[current] ~= notmatchreplace[current] then
- head, current = flattendisk(head,current)
- else
- current = getnext(current) -- HH
- end
- else
- current = getnext(current)
- end
- if not current and t then
- current = getnext(sweepnode)
- if current then
- sweeptype = nil
- end
- end
- end
- end
-
- if f > 1 then
- local current = prev
- local i = f
- local t = sweeptype == "pre" or sweeptype == "replace"
- if not current and t and current == checkdisc then
- current = getprev(sweepnode)
- end
- while current and i > 1 do -- missing getprev added / moved outside
- local id = getid(current)
- if id == glyph_code then
- i = i - 1
- elseif id == disc_code then
- if keepdisc then
- keepdisc = false
- if notmatchpost[current] ~= notmatchreplace[current] then
- backtrackdisc = current
- end
- local replace = getfield(current,"replace")
- while replace and i > 1 do
- if getid(replace) == glyph_code then
- i = i - 1
- end
- replace = getnext(replace)
- end
- elseif notmatchpost[current] ~= notmatchreplace[current] then
- head, current = flattendisk(head,current)
- end
- end
- current = getprev(current)
- if t and current == checkdisc then
- current = getprev(sweepnode)
- end
- end
- end
-
- local ok = false
- if lookaheaddisc then
-
- local cf = start
- local cl = getprev(lookaheaddisc)
- local cprev = getprev(start)
- local insertedmarks = 0
-
- while cprev and getid(cf) == glyph_code and getfont(cf) == currentfont and getsubtype(cf) < 256 and marks[getchar(cf)] do
- insertedmarks = insertedmarks + 1
- cf = cprev
- startishead = cf == head
- cprev = getprev(cprev)
- end
-
- setprev(lookaheaddisc,cprev)
- if cprev then
- setnext(cprev,lookaheaddisc)
- end
- setprev(cf,nil)
- setnext(cl,nil)
- if startishead then
- head = lookaheaddisc
- end
-
- local replace = getfield(lookaheaddisc,"replace")
- local pre = getfield(lookaheaddisc,"pre")
- local new = copy_node_list(cf)
- local cnew = new
- for i=1,insertedmarks do
- cnew = getnext(cnew)
- end
- local clast = cnew
- for i=f,l do
- clast = getnext(clast)
- end
- if not notmatchpre[lookaheaddisc] then
- cf, start, ok = chainproc(cf,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- end
- if not notmatchreplace[lookaheaddisc] then
- new, cnew, ok = chainproc(new,cnew,clast,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- end
- if pre then
- setnext(cl,pre)
- setprev(pre,cl)
- end
- if replace then
- local tail = find_node_tail(new)
- setnext(tail,replace)
- setprev(replace,tail)
- end
- setfield(lookaheaddisc,"pre",cf) -- also updates tail
- setfield(lookaheaddisc,"replace",new) -- also updates tail
-
- start = getprev(lookaheaddisc)
- sweephead[cf] = getnext(clast)
- sweephead[new] = getnext(last)
-
- elseif backtrackdisc then
-
- local cf = getnext(backtrackdisc)
- local cl = start
- local cnext = getnext(start)
- local insertedmarks = 0
-
- while cnext and getid(cnext) == glyph_code and getfont(cnext) == currentfont and getsubtype(cnext) < 256 and marks[getchar(cnext)] do
- insertedmarks = insertedmarks + 1
- cl = cnext
- cnext = getnext(cnext)
- end
- if cnext then
- setprev(cnext,backtrackdisc)
- end
- setnext(backtrackdisc,cnext)
- setprev(cf,nil)
- setnext(cl,nil)
- local replace = getfield(backtrackdisc,"replace")
- local post = getfield(backtrackdisc,"post")
- local new = copy_node_list(cf)
- local cnew = find_node_tail(new)
- for i=1,insertedmarks do
- cnew = getprev(cnew)
- end
- local clast = cnew
- for i=f,l do
- clast = getnext(clast)
- end
- if not notmatchpost[backtrackdisc] then
- cf, start, ok = chainproc(cf,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- end
- if not notmatchreplace[backtrackdisc] then
- new, cnew, ok = chainproc(new,cnew,clast,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- end
- if post then
- local tail = find_node_tail(post)
- setnext(tail,cf)
- setprev(cf,tail)
- else
- post = cf
- end
- if replace then
- local tail = find_node_tail(replace)
- setnext(tail,new)
- setprev(new,tail)
- else
- replace = new
- end
- setfield(backtrackdisc,"post",post) -- also updates tail
- setfield(backtrackdisc,"replace",replace) -- also updates tail
- start = getprev(backtrackdisc)
- sweephead[post] = getnext(clast)
- sweephead[replace] = getnext(last)
-
- else
-
- head, start, ok = chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
-
- end
-
- return head, start, ok
-end
-
-local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
- local sweepnode = sweepnode
- local sweeptype = sweeptype
- local diskseen = false
- local checkdisc = getprev(head)
- local flags = sequence.flags
- local done = false
- local skipmark = flags[1]
- local skipligature = flags[2]
- local skipbase = flags[3]
- local markclass = sequence.markclass
- local skipped = false
-
- for k=1,#contexts do -- i've only seen ccmp having > 1 (e.g. dejavu)
- local match = true
- local current = start
- local last = start
- local ck = contexts[k]
- local seq = ck[3]
- local s = #seq
- -- f..l = mid string
- if s == 1 then
- -- never happens
- match = getid(current) == glyph_code and getfont(current) == currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
- else
- -- maybe we need a better space check (maybe check for glue or category or combination)
- -- we cannot optimize for n=2 because there can be disc nodes
- local f = ck[4]
- local l = ck[5]
- -- current match
- if f == 1 and f == l then -- current only
- -- already a hit
- -- match = true
- else -- before/current/after | before/current | current/after
- -- no need to test first hit (to be optimized)
- if f == l then -- new, else last out of sync (f is > 1)
- -- match = true
- else
- local discfound = nil
- local n = f + 1
- last = getnext(last)
- while n <= l do
- if not last and (sweeptype == "post" or sweeptype == "replace") then
- last = getnext(sweepnode)
- sweeptype = nil
- end
- if last then
- local id = getid(last)
- if id == glyph_code then
- if getfont(last) == currentfont and getsubtype(last)<256 then
- local char = getchar(last)
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class or "base"
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- last = getnext(last)
- elseif seq[n][char] then
- if n < l then
- last = getnext(last)
- end
- n = n + 1
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
- end
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
- end
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
- end
- elseif id == disc_code then
- diskseen = true
- discfound = last
- notmatchpre[last] = nil
- notmatchpost[last] = true
- notmatchreplace[last] = nil
- local pre = getfield(last,"pre")
- local replace = getfield(last,"replace")
- if pre then
- local n = n
- while pre do
- if seq[n][getchar(pre)] then
- n = n + 1
- pre = getnext(pre)
- if n > l then
- break
- end
- else
- notmatchpre[last] = true
- break
- end
- end
- if n <= l then
- notmatchpre[last] = true
- end
- else
- notmatchpre[last] = true
- end
- if replace then
- -- so far we never entered this branch
- while replace do
- if seq[n][getchar(replace)] then
- n = n + 1
- replace = getnext(replace)
- if n > l then
- break
- end
- else
- notmatchreplace[last] = true
- match = not notmatchpre[last]
- break
- end
- end
- match = not notmatchpre[last]
- end
- last = getnext(last)
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- end
- end
- end
- -- before
- if match and f > 1 then
- local prev = getprev(start)
- if prev then
- if prev == checkdisc and (sweeptype == "pre" or sweeptype == "replace") then
- prev = getprev(sweepnode)
- -- sweeptype = nil
- end
- if prev then
- local discfound = nil
- local n = f - 1
- while n >= 1 do
- if prev then
- local id = getid(prev)
- if id == glyph_code then
- if getfont(prev) == currentfont and getsubtype(prev)<256 then -- normal char
- local char = getchar(prev)
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n -1
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpost[discfound]
- else
- match = false
- end
- break
- end
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpost[discfound]
- else
- match = false
- end
- break
- end
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpost[discfound]
- else
- match = false
- end
- break
- end
- elseif id == disc_code then
- -- the special case: f i where i becomes dotless i ..
- diskseen = true
- discfound = prev
- notmatchpre[prev] = true
- notmatchpost[prev] = nil
- notmatchreplace[prev] = nil
- local pre = getfield(prev,"pre")
- local post = getfield(prev,"post")
- local replace = getfield(prev,"replace")
- if pre ~= start and post ~= start and replace ~= start then
- if post then
- local n = n
- local posttail = find_node_tail(post)
- while posttail do
- if seq[n][getchar(posttail)] then
- n = n - 1
- if posttail == post then
- break
- else
- posttail = getprev(posttail)
- if n < 1 then
- break
- end
- end
- else
- notmatchpost[prev] = true
- break
- end
- end
- if n >= 1 then
- notmatchpost[prev] = true
- end
- else
- notmatchpost[prev] = true
- end
- if replace then
- -- we seldom enter this branch (e.g. on brill efficient)
- local replacetail = find_node_tail(replace)
- while replacetail do
- if seq[n][getchar(replacetail)] then
- n = n - 1
- if replacetail == replace then
- break
- else
- replacetail = getprev(replacetail)
- if n < 1 then
- break
- end
- end
- else
- notmatchreplace[prev] = true
- match = not notmatchpost[prev]
- break
- end
- end
- if not match then
- break
- end
- else
- -- skip 'm
- end
- else
- -- skip 'm
- end
- elseif seq[n][32] then
- n = n -1
- else
- match = false
- break
- end
- prev = getprev(prev)
- elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
- n = n - 1
- else
- match = false
- break
- end
- end
- else
- match = false
- end
- else
- match = false
- end
- end
- -- after
- if match and s > l then
- local current = last and getnext(last)
- if not current then
- if sweeptype == "post" or sweeptype == "replace" then
- current = getnext(sweepnode)
- -- sweeptype = nil
- end
- end
- if current then
- local discfound = nil
- -- removed optimization for s-l == 1, we have to deal with marks anyway
- local n = l + 1
- while n <= s do
- if current then
- local id = getid(current)
- if id == glyph_code then
- if getfont(current) == currentfont and getsubtype(current)<256 then -- normal char
- local char = getchar(current)
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n + 1
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
- end
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
- end
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
- end
- elseif id == disc_code then
- diskseen = true
- discfound = current
- notmatchpre[current] = nil
- notmatchpost[current] = true
- notmatchreplace[current] = nil
- local pre = getfield(current,"pre")
- local replace = getfield(current,"replace")
- if pre then
- local n = n
- while pre do
- if seq[n][getchar(pre)] then
- n = n + 1
- pre = getnext(pre)
- if n > s then
- break
- end
- else
- notmatchpre[current] = true
- break
- end
- end
- if n <= s then
- notmatchpre[current] = true
- end
- else
- notmatchpre[current] = true
- end
- if replace then
- -- so far we never entered this branch
- while replace do
- if seq[n][getchar(replace)] then
- n = n + 1
- replace = getnext(replace)
- if n > s then
- break
- end
- else
- notmatchreplace[current] = true
- match = not notmatchpre[current]
- break
- end
- end
- if not match then
- break
- end
- else
- -- skip 'm
- end
- elseif seq[n][32] then -- brrr
- n = n + 1
- else
- match = false
- break
- end
- current = getnext(current)
- elseif seq[n][32] then
- n = n + 1
- else
- match = false
- break
- end
- end
- else
- match = false
- end
- end
- end
- if match then
- -- can lookups be of a different type ?
- local diskchain = diskseen or sweepnode
- if trace_contexts then
- local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = getchar(start)
- if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
- else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
- end
- end
- local chainlookups = ck[6]
- if chainlookups then
- local nofchainlookups = #chainlookups
- -- we can speed this up if needed
- if nofchainlookups == 1 then
- local chainlookupname = chainlookups[1]
- local chainlookup = lookuptable[chainlookupname]
- if chainlookup then
- local chainproc = chainprocs[chainlookup.type]
- if chainproc then
- local ok
- if diskchain then
- head, start, ok = chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence,chainproc)
- else
- head, start, ok = chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- end
- if ok then
- done = true
- end
- else
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- end
- else -- shouldn't happen
- logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
- end
- else
- local i = 1
- while start and true do
- if skipped then
- while true do -- todo: use properties
- local char = getchar(start)
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class or "base"
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = getnext(start)
- else
- break
- end
- else
- break
- end
- end
- end
- -- see remark in ms standard under : LookupType 5: Contextual Substitution Subtable
- local chainlookupname = chainlookups[i]
- local chainlookup = lookuptable[chainlookupname]
- if not chainlookup then
- -- we just advance
- i = i + 1
- else
- local chainproc = chainprocs[chainlookup.type]
- if not chainproc then
- -- actually an error
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- i = i + 1
- else
- local ok, n
- if diskchain then
- head, start, ok = chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence,chainproc)
- else
- head, start, ok, n = chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
- end
- -- messy since last can be changed !
- if ok then
- done = true
- if n and n > 1 then
- -- we have a ligature (cf the spec we advance one but we really need to test it
- -- as there are fonts out there that are fuzzy and have too many lookups:
- --
- -- U+1105 U+119E U+1105 U+119E : sourcehansansklight: script=hang ccmp=yes
- --
- if i + n > nofchainlookups then
- -- if trace_contexts then
- -- logprocess("%s: quitting lookups",cref(kind,chainname))
- -- end
- break
- else
- -- we need to carry one
- end
- end
- end
- i = i + 1
- end
- end
- if i > nofchainlookups or not start then
- break
- elseif start then
- start = getnext(start)
- end
- end
- end
- else
- local replacements = ck[7]
- if replacements then
- head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
- else
- done = quit_on_no_replacement -- can be meant to be skipped / quite inconsistent in fonts
- if trace_contexts then
- logprocess("%s: skipping match",cref(kind,chainname))
- end
- end
- end
- if done then
- break -- out of contexts (new, needs checking)
- end
- end
- end
- if diskseen then -- maybe move up so that we can turn checking on/off
- notmatchpre = { }
- notmatchpost = { }
- notmatchreplace = { }
- end
- return head, start, done
-end
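The matching loop above walks a rule's coverage sequence `seq` slot by slot and tolerates a rule slot that only covers a space (the `seq[n][32]` branches). A minimal standalone sketch of that tolerance in plain Lua, with glyph nodes reduced to an array of character codes and all names purely illustrative:

local function matchsequence(seq, chars, first)
    -- 'seq' is an array of coverage sets, 'chars' an array of char codes
    local n = 1
    local i = first
    while n <= #seq do
        local code = chars[i]
        if code and seq[n][code] then
            n = n + 1
            i = i + 1
        elseif seq[n][32] then
            -- the rule expects a space here; skip the slot, keep the char
            n = n + 1
        else
            return false
        end
    end
    return true
end

local rule = { { [0x66] = true }, { [32] = true }, { [0x69] = true } } -- 'f' <space> 'i'
print(matchsequence(rule, { 0x66, 0x69 }, 1)) -- true : the space slot is tolerated
print(matchsequence(rule, { 0x66, 0x6C }, 1)) -- false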
-
--- Because we want to keep this elsewhere (and because speed is less of an issue) we
--- pass the font id so that the verbose variant can access the relevant helper tables.
-
-local verbose_handle_contextchain = function(font,...)
- logwarning("no verbose handler installed, reverting to 'normal'")
- otf.setcontextchain()
- return normal_handle_contextchain(...)
-end
-
-otf.chainhandlers = {
- normal = normal_handle_contextchain,
- verbose = verbose_handle_contextchain,
-}
-
-local handle_contextchain = nil
-
--- normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
-
-local function chained_contextchain(head,start,stop,...)
- local steps = currentlookup.steps
- local nofsteps = currentlookup.nofsteps
- if nofsteps > 1 then
- reportmoresteps(dataset,sequence)
- end
- return handle_contextchain(head,start,...)
-end
-
-function otf.setcontextchain(method)
- if not method or method == "normal" or not otf.chainhandlers[method] then
- if handle_contextchain then -- no need for a message while making the format
- logwarning("installing normal contextchain handler")
- end
- handle_contextchain = normal_handle_contextchain
- else
- logwarning("installing contextchain handler %a",method)
- local handler = otf.chainhandlers[method]
- handle_contextchain = function(...)
- return handler(currentfont,...) -- hm, get rid of ...
- end
- end
-
- handlers.gsub_context = handle_contextchain
- handlers.gsub_contextchain = handle_contextchain
- handlers.gsub_reversecontextchain = handle_contextchain
- handlers.gpos_contextchain = handle_contextchain
- handlers.gpos_context = handle_contextchain
-
- handlers.contextchain = handle_contextchain
-
-end
-
-chainprocs.gsub_context = chained_contextchain
-chainprocs.gsub_contextchain = chained_contextchain
-chainprocs.gsub_reversecontextchain = chained_contextchain
-chainprocs.gpos_contextchain = chained_contextchain
-chainprocs.gpos_context = chained_contextchain
-
-otf.setcontextchain()
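The block above is essentially an installer: a table of named chain handlers plus a setter that either wires in the normal handler or wraps a variant so it also receives the current font, as the comment before it explains. A reduced sketch of that pattern, with made-up names and without the real handler table:

local currentfont = 42 -- stand-in for the shared upvalue used above

local function normal(head) return head, true end
local function verbose(font, head) print("verbose run for font", font) return head, true end

local chainhandlers = { normal = normal, verbose = verbose }
local handle

local function setchainhandler(method)
    if not method or method == "normal" or not chainhandlers[method] then
        handle = chainhandlers.normal
    else
        local handler = chainhandlers[method]
        handle = function(...) return handler(currentfont, ...) end
    end
end

setchainhandler("verbose")
handle("somehead") -- prints: verbose run for font 42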
-
-local missing = { } -- we only report once
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_process(...)
-end
-
-local logwarning = report_process
-
-local function report_missing_cache(typ,lookup)
- local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
- local t = f[typ] if not t then t = { } f[typ] = t end
- if not t[lookup] then
- t[lookup] = true
- logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
- end
-end
-
-local resolved = { } -- we only resolve a font,script,language pair once
-
--- todo: pass all these 'locals' in a table
-
-local lookuphashes = { }
-
-setmetatableindex(lookuphashes, function(t,font)
- local lookuphash = fontdata[font].resources.lookuphash
- if not lookuphash or not next(lookuphash) then
- lookuphash = false
- end
- t[font] = lookuphash
- return lookuphash
-end)
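The `lookuphashes` table above is a lazy cache: the `__index` metamethod computes the value on first access and stores `false` when a font has nothing to process, so the absence is cached too. A small self-contained sketch, with a stand-in `fontdata` table instead of the real font hash:

local next, setmetatable = next, setmetatable

local fontdata = { -- stand-in for the real identifiers hash
    [1] = { resources = { lookuphash = { } } },                          -- nothing prepared
    [2] = { resources = { lookuphash = { liga = { [0x66] = true } } } }, -- something to do
}

local lookuphashes = setmetatable({ }, { __index = function(t, font)
    local lookuphash = fontdata[font].resources.lookuphash
    if not lookuphash or not next(lookuphash) then
        lookuphash = false -- cache the negative result as well
    end
    t[font] = lookuphash
    return lookuphash
end })

print(lookuphashes[1])          -- false
print(lookuphashes[2] ~= false) -- true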
-
--- fonts.hashes.lookups = lookuphashes
-
-local autofeatures = fonts.analyzers.features
-local featuretypes = otf.tables.featuretypes
-local defaultscript = otf.features.checkeddefaultscript
-local defaultlanguage = otf.features.checkeddefaultlanguage
-
-local function initialize(sequence,script,language,enabled,autoscript,autolanguage)
- local features = sequence.features
- if features then
- local order = sequence.order
- if order then
- local featuretype = featuretypes[sequence.type or "unknown"]
- for i=1,#order do
- local kind = order[i]
- local valid = enabled[kind]
- if valid then
- local scripts = features[kind]
- local languages = scripts and (
- scripts[script] or
- scripts[wildcard] or
- (autoscript and defaultscript(featuretype,autoscript,scripts))
- )
- local enabled = languages and (
- languages[language] or
- languages[wildcard] or
- (autolanguage and defaultlanguage(featuretype,autolanguage,languages))
- )
- if enabled then
- return { valid, autofeatures[kind] or false, sequence, kind }
- end
- end
- end
- else
- -- can't happen
- end
- end
- return false
-end
-
-function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
- local shared = tfmdata.shared
- local properties = tfmdata.properties
- local language = properties.language or "dflt"
- local script = properties.script or "dflt"
- local enabled = shared.features
- local autoscript = enabled and enabled.autoscript
- local autolanguage = enabled and enabled.autolanguage
- local res = resolved[font]
- if not res then
- res = { }
- resolved[font] = res
- end
- local rs = res[script]
- if not rs then
- rs = { }
- res[script] = rs
- end
- local rl = rs[language]
- if not rl then
- rl = {
- -- indexed but we can also add specific data by key
- }
- rs[language] = rl
- local sequences = tfmdata.resources.sequences
- if sequences then
- for s=1,#sequences do
- local v = enabled and initialize(sequences[s],script,language,enabled,autoscript,autolanguage)
- if v then
- rl[#rl+1] = v
- end
- end
- end
- end
- return rl
-end
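`otf.dataset` above memoizes its work per font, script and language, so the per-sequence feature resolution only happens once for each triple. A standalone sketch of that caching, where `resolve` stands in for the real `initialize` pass over the font's sequences:

local resolved = { } -- font -> script -> language -> dataset

local calls = 0
local function resolve(font, script, language) -- stand-in for the initialize() loop
    calls = calls + 1
    return { font = font, script = script, language = language }
end

local function dataset(font, script, language)
    local res = resolved[font]  if not res then res = { } resolved[font] = res end
    local rs  = res[script]     if not rs  then rs  = { } res[script]    = rs  end
    local rl  = rs[language]
    if not rl then
        rl = resolve(font, script, language)
        rs[language] = rl
    end
    return rl
end

dataset(1, "latn", "dflt")
dataset(1, "latn", "dflt")
print(calls) -- 1 : the second call is served from the cache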
-
--- assumptions:
---
--- * languages that use complex disc nodes
-
-local function kernrun(disc,run)
- --
- -- we catch <font 1><disc font 2>
- --
- if trace_kernruns then
- report_run("kern") -- will be more detailed
- end
- --
- local prev = getprev(disc) -- todo, keep these in the main loop
- local next = getnext(disc) -- todo, keep these in the main loop
- --
- local pre = getfield(disc,"pre")
- local post = getfield(disc,"post")
- local replace = getfield(disc,"replace")
- --
- local prevmarks = prev
- --
- -- can be optional, because why on earth do we get a disc after a mark (okay, maybe when a ccmp
- -- has happened but then it should be in the disc so basically this test indicates an error)
- --
- while prevmarks and getid(prevmarks) == glyph_code and marks[getchar(prevmarks)] and getfont(prevmarks) == currentfont and getsubtype(prevmarks) < 256 do
- prevmarks = getprev(prevmarks)
- end
- --
- if prev and (pre or replace) and not (getid(prev) == glyph_code and getfont(prev) == currentfont and getsubtype(prev)<256) then
- prev = false
- end
- if next and (post or replace) and not (getid(next) == glyph_code and getfont(next) == currentfont and getsubtype(next)<256) then
- next = false
- end
- --
- if not pre then
- -- go on
- elseif prev then
- local nest = getprev(pre)
- setprev(pre,prev)
- setnext(prev,pre)
- run(prevmarks,"preinjections")
- setprev(pre,nest)
- setnext(prev,disc)
- else
- run(pre,"preinjections")
- end
- --
- if not post then
- -- go on
- elseif next then
- local tail = find_node_tail(post)
- setnext(tail,next)
- setprev(next,tail)
- run(post,"postinjections",next)
- setnext(tail,nil)
- setprev(next,disc)
- else
- run(post,"postinjections")
- end
- --
- if not replace and prev and next then
- -- this should be already done by discfound
- setnext(prev,next)
- setprev(next,prev)
- run(prevmarks,"injections",next)
- setnext(prev,disc)
- setprev(next,disc)
- elseif prev and next then
- local tail = find_node_tail(replace)
- local nest = getprev(replace)
- setprev(replace,prev)
- setnext(prev,replace)
- setnext(tail,next)
- setprev(next,tail)
- run(prevmarks,"replaceinjections",next)
- setprev(replace,nest)
- setnext(prev,disc)
- setnext(tail,nil)
- setprev(next,disc)
- elseif prev then
- local nest = getprev(replace)
- setprev(replace,prev)
- setnext(prev,replace)
- run(prevmarks,"replaceinjections")
- setprev(replace,nest)
- setnext(prev,disc)
- elseif next then
- local tail = find_node_tail(replace)
- setnext(tail,next)
- setprev(next,tail)
- run(replace,"replaceinjections",next)
- setnext(tail,nil)
- setprev(next,disc)
- else
- run(replace,"replaceinjections")
- end
-end
-
--- the 'if new' test might be dangerous as luatex will check / set some tail stuff
--- in a temp node
-
-local function comprun(disc,run)
- if trace_compruns then
- report_run("comp: %s",languages.serializediscretionary(disc))
- end
- --
- local pre = getfield(disc,"pre")
- if pre then
- sweepnode = disc
- sweeptype = "pre" -- in alternative code preinjections is used (also used then for proeprties, saves a variable)
- local new, done = run(pre)
- if done then
- setfield(disc,"pre",new)
- end
- end
- --
- local post = getfield(disc,"post")
- if post then
- sweepnode = disc
- sweeptype = "post"
- local new, done = run(post)
- if done then
- setfield(disc,"post",new)
- end
- end
- --
- local replace = getfield(disc,"replace")
- if replace then
- sweepnode = disc
- sweeptype = "replace"
- local new, done = run(replace)
- if done then
- setfield(disc,"replace",new)
- end
- end
- sweepnode = nil
- sweeptype = nil
-end
-
-local function testrun(disc,trun,crun) -- use helper
- local next = getnext(disc)
- if next then
- local replace = getfield(disc,"replace")
- if replace then
- local prev = getprev(disc)
- if prev then
- -- only look ahead
- local tail = find_node_tail(replace)
- -- local nest = getprev(replace)
- setnext(tail,next)
- setprev(next,tail)
- if trun(replace,next) then
- setfield(disc,"replace",nil) -- beware, side effects of nest so first
- setnext(prev,replace)
- setprev(replace,prev)
- setprev(next,tail)
- setnext(tail,next)
- setprev(disc,nil)
- setnext(disc,nil)
- flush_node_list(disc)
- return replace -- restart
- else
- setnext(tail,nil)
- setprev(next,disc)
- end
- else
- -- weird case
- end
- else
- -- no need
- end
- else
- -- weird case
- end
- comprun(disc,crun)
- return next
-end
-
-local function discrun(disc,drun,krun)
- local next = getnext(disc)
- local prev = getprev(disc)
- if trace_discruns then
- report_run("disc") -- will be more detailed
- end
- if next and prev then
- setnext(prev,next)
- -- setprev(next,prev)
- drun(prev)
- setnext(prev,disc)
- -- setprev(next,disc)
- end
- --
- local pre = getfield(disc,"pre")
- if not pre then
- -- go on
- elseif prev then
- local nest = getprev(pre)
- setprev(pre,prev)
- setnext(prev,pre)
- krun(prev,"preinjections")
- setprev(pre,nest)
- setnext(prev,disc)
- else
- krun(pre,"preinjections")
- end
- return next
-end
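The kernrun, comprun, testrun and discrun helpers above share one splice pattern: temporarily link a discretionary's pre, post or replace text into the surrounding list, run the pass over it, then restore the original links. A toy version with plain tables standing in for nodes (prev/next fields instead of the real node accessors), shown for the replace field only:

local function splicereplace(prev, disc, nxt, run)
    local replace = disc.replace
    if not replace then return end
    local tail = replace
    while tail.next do tail = tail.next end
    prev.next, replace.prev = replace, prev -- hook the replace text into the list
    tail.next, nxt.prev     = nxt, tail
    run(replace, nxt)                       -- e.g. apply kerning over the spliced run
    prev.next, nxt.prev     = disc, disc    -- put the discretionary back
    replace.prev, tail.next = nil, nil
end

local a    = { char = "a" }
local b    = { char = "b" }
local disc = { replace = { char = "c" } }
a.next, disc.prev = disc, a
disc.next, b.prev = b, disc

splicereplace(a, disc, b, function(first, last)
    local n, out = first, { }
    while n and n ~= last do
        out[#out+1] = n.char
        n = n.next
    end
    print(table.concat(out)) -- c
end)
print(a.next == disc, b.prev == disc) -- true  true : links restored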
-
--- todo: maybe run lr and rl stretches
-
-local function featuresprocessor(head,font,attr)
-
- local lookuphash = lookuphashes[font] -- we can also check sequences here
-
- if not lookuphash then
- return head, false
- end
-
- head = tonut(head)
-
- if trace_steps then
- checkstep(head)
- end
-
- tfmdata = fontdata[font]
- descriptions = tfmdata.descriptions
- characters = tfmdata.characters
- resources = tfmdata.resources
-
- marks = resources.marks
- anchorlookups = resources.lookup_to_anchor
- lookuptable = resources.lookups
- lookuptypes = resources.lookuptypes
- lookuptags = resources.lookuptags
-
- currentfont = font
- rlmode = 0
- sweephead = { }
-
- local sequences = resources.sequences
- local done = false
- local datasets = otf.dataset(tfmdata,font,attr)
-
- local dirstack = { } -- could move outside function
-
-    -- We could work on sub start-stop ranges instead but I wonder if there is that
-    -- much speed gain (experiments showed that it didn't make much sense) and we need
-    -- to keep track of directions anyway. Also at some point I want to play with
-    -- font interactions and then we do need the full sweeps.
-
- -- Keeping track of the headnode is needed for devanagari (I generalized it a bit
- -- so that multiple cases are also covered.)
-
-    -- We don't go to the next node if a disc node is created, so that we can then treat
-    -- the pre, post and replace. It's a bit of a hack but works out ok for most cases.
-
- -- there can be less subtype and attr checking in the comprun etc helpers
-
- for s=1,#datasets do
- local dataset = datasets[s]
- featurevalue = dataset[1] -- todo: pass to function instead of using a global
- local attribute = dataset[2]
- local sequence = dataset[3] -- sequences[s] -- also dataset[5]
- local kind = dataset[4]
- ----- chain = dataset[5] -- sequence.chain or 0
- local rlparmode = 0
- local topstack = 0
- local success = false
- local typ = sequence.type
- local gpossing = typ == "gpos_single" or typ == "gpos_pair" -- maybe all of them
- local subtables = sequence.subtables
- local handler = handlers[typ]
- if typ == "gsub_reversecontextchain" then -- chain < 0
- -- this is a limited case, no special treatments like 'init' etc
- -- we need to get rid of this slide! probably no longer needed in latest luatex
- local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
- while start do
- local id = getid(start)
- if id == glyph_code then
- if getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
- if a then
- a = a == attr
- else
- a = true
- end
- if a then
- local char = getchar(start)
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- todo: disc?
- head, start, success = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
- if success then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = getprev(start) end
- else
- start = getprev(start)
- end
- else
- start = getprev(start)
- end
- else
- start = getprev(start)
- end
- end
- else
- local ns = #subtables
- local start = head -- local ?
- rlmode = 0 -- to be checked ?
- if ns == 1 then -- happens often
- local lookupname = subtables[1]
- local lookupcache = lookuphash[lookupname]
- if not lookupcache then -- also check for empty cache
- report_missing_cache(typ,lookupname)
- else
-
- local function c_run(head) -- no need to check for 256 and attr probably also the same
- local done = false
- local start = sweephead[head]
- if start then
- sweephead[head] = nil
- else
- start = head
- end
- while start do
- local id = getid(start)
- if id ~= glyph_code then
- -- very unlikely
- start = getnext(start)
- elseif getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- local lookupmatch = lookupcache[getchar(start)]
- if lookupmatch then
-                                        -- sequence can be dropped
- local ok
- head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- done = true
- end
- end
- if start then start = getnext(start) end
- else
- start = getnext(start)
- end
- else
- return head, false
- end
- end
- if done then
- success = true -- needed in this subrun?
- end
- return head, done
- end
-
- local function t_run(start,stop)
- while start ~= stop do
- local id = getid(start)
- if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- local lookupmatch = lookupcache[getchar(start)]
- if lookupmatch then -- hm, hyphens can match (tlig) so we need to really check
- -- if we need more than ligatures we can outline the code and use functions
- local s = getnext(start)
- local l = nil
- while s do
- local lg = lookupmatch[getchar(s)]
- if lg then
- l = lg
- s = getnext(s)
- else
- break
- end
- end
- if l and l.ligature then
- return true
- end
- end
- end
- start = getnext(start)
- else
- break
- end
- end
- end
-
- local function d_run(prev) -- we can assume that prev and next are glyphs
- local a = getattr(prev,0)
- if a then
- a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
- else
- a = not attribute or getprop(prev,a_state) == attribute
- end
- if a then
- local lookupmatch = lookupcache[getchar(prev)]
- if lookupmatch then
-                                -- sequence can be dropped
- local h, d, ok = handler(head,prev,kind,lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- done = true
- success = true
- end
- end
- end
- end
-
- local function k_run(sub,injection,last)
- local a = getattr(sub,0)
- if a then
- a = (a == attr) and (not attribute or getprop(sub,a_state) == attribute)
- else
- a = not attribute or getprop(sub,a_state) == attribute
- end
- if a then
-                            -- sequence can be dropped
- for n in traverse_nodes(sub) do -- only gpos
- if n == last then
- break
- end
- local id = getid(n)
- if id == glyph_code then
- local lookupmatch = lookupcache[getchar(n)]
- if lookupmatch then
- local h, d, ok = handler(sub,n,kind,lookupname,lookupmatch,sequence,lookuphash,1,injection)
- if ok then
- done = true
- success = true
- end
- end
- else
- -- message
- end
- end
- end
- end
-
- while start do
- local id = getid(start)
- if id == glyph_code then
- if getfont(start) == font and getsubtype(start) < 256 then -- why a 256 test ...
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- local char = getchar(start)
- local lookupmatch = lookupcache[char]
- if lookupmatch then
-                                        -- sequence can be dropped
- local ok
- head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- success = true
- elseif gpossing and zwnjruns and char == zwnj then
- discrun(start,d_run)
- end
- elseif gpossing and zwnjruns and char == zwnj then
- discrun(start,d_run)
- end
- if start then start = getnext(start) end
- else
- start = getnext(start)
- end
- else
- start = getnext(start)
- end
- elseif id == disc_code then
- if gpossing then
- kernrun(start,k_run)
- start = getnext(start)
- elseif typ == "gsub_ligature" then
- start = testrun(start,t_run,c_run)
- else
- comprun(start,c_run)
- start = getnext(start)
- end
- elseif id == math_code then
- start = getnext(end_of_math(start))
- elseif id == dir_code then
- local dir = getfield(start,"dir")
- if dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- rlmode = 1
- elseif dir == "+TRT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- rlmode = -1
- elseif dir == "-TLT" or dir == "-TRT" then
- topstack = topstack - 1
- rlmode = dirstack[topstack] == "+TRT" and -1 or 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- start = getnext(start)
- elseif id == localpar_code then
- local dir = getfield(start,"dir")
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- -- one might wonder if the par dir should be looked at, so we might as well drop the next line
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
- start = getnext(start)
- else
- start = getnext(start)
- end
- end
- end
-
- else
-
- local function c_run(head)
- local done = false
- local start = sweephead[head]
- if start then
- sweephead[head] = nil
- else
- start = head
- end
- while start do
- local id = getid(start)
- if id ~= glyph_code then
- -- very unlikely
- start = getnext(start)
- elseif getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- local char = getchar(start)
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- done = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = getnext(start) end
- else
- start = getnext(start)
- end
- else
- return head, false
- end
- end
- if done then
- success = true
- end
- return head, done
- end
-
- local function d_run(prev)
- local a = getattr(prev,0)
- if a then
- a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
- else
- a = not attribute or getprop(prev,a_state) == attribute
- end
- if a then
- -- brr prev can be disc
- local char = getchar(prev)
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local h, d, ok = handler(head,prev,kind,lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- done = true
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- end
- end
-
- local function k_run(sub,injection,last)
- local a = getattr(sub,0)
- if a then
- a = (a == attr) and (not attribute or getprop(sub,a_state) == attribute)
- else
- a = not attribute or getprop(sub,a_state) == attribute
- end
- if a then
- for n in traverse_nodes(sub) do -- only gpos
- if n == last then
- break
- end
- local id = getid(n)
- if id == glyph_code then
- local char = getchar(n)
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- local h, d, ok = handler(head,n,kind,lookupname,lookupmatch,sequence,lookuphash,i,injection)
- if ok then
- done = true
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- else
- -- message
- end
- end
- end
- end
-
- local function t_run(start,stop)
- while start ~= stop do
- local id = getid(start)
- if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- local char = getchar(start)
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- if we need more than ligatures we can outline the code and use functions
- local s = getnext(start)
- local l = nil
- while s do
- local lg = lookupmatch[getchar(s)]
- if lg then
- l = lg
- s = getnext(s)
- else
- break
- end
- end
- if l and l.ligature then
- return true
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- end
- start = getnext(start)
- else
- break
- end
- end
- end
-
- while start do
- local id = getid(start)
- if id == glyph_code then
- if getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local char = getchar(start)
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- success = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- elseif gpossing and zwnjruns and char == zwnj then
- discrun(start,d_run)
- end
- elseif gpossing and zwnjruns and char == zwnj then
- discrun(start,d_run)
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = getnext(start) end
- else
- start = getnext(start)
- end
- else
- start = getnext(start)
- end
- elseif id == disc_code then
- if gpossing then
- kernrun(start,k_run)
- start = getnext(start)
- elseif typ == "gsub_ligature" then
- start = testrun(start,t_run,c_run)
- else
- comprun(start,c_run)
- start = getnext(start)
- end
- elseif id == math_code then
- start = getnext(end_of_math(start))
- elseif id == dir_code then
- local dir = getfield(start,"dir")
- if dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- rlmode = 1
- elseif dir == "+TRT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- rlmode = -1
- elseif dir == "-TLT" or dir == "-TRT" then
- topstack = topstack - 1
- rlmode = dirstack[topstack] == "+TRT" and -1 or 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- start = getnext(start)
- elseif id == localpar_code then
- local dir = getfield(start,"dir")
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
- start = getnext(start)
- else
- start = getnext(start)
- end
- end
- end
- end
- if success then
- done = true
- end
- if trace_steps then -- ?
- registerstep(head)
- end
-
- end
-
- head = tonode(head)
-
- return head, done
-end
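The direction handling inside `featuresprocessor` above is a small stack machine: `+TLT`/`+TRT` push a level and set the run direction, `-TLT`/`-TRT` pop back to the enclosing level, and anything else falls back to the paragraph direction. A standalone sketch of just that bookkeeping (the function name is made up):

local function newdirtracker(parmode)
    local stack, top, mode = { }, 0, parmode
    return function(dir)
        if dir == "+TLT" or dir == "+TRT" then
            top = top + 1
            stack[top] = dir
            mode = dir == "+TRT" and -1 or 1
        elseif dir == "-TLT" or dir == "-TRT" then
            top = top - 1
            mode = stack[top] == "+TRT" and -1 or 1
        else
            mode = parmode
        end
        return mode
    end
end

local track = newdirtracker(1) -- a left-to-right paragraph
print(track("+TRT")) -- -1
print(track("+TLT")) --  1
print(track("-TLT")) -- -1 : back inside the +TRT level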
-
--- this might move to the loader
-
-local function generic(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if target then
- target[unicode] = lookupdata
- else
- lookuphash[lookupname] = { [unicode] = lookupdata }
- end
-end
-
-local function ligature(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- for i=1,#lookupdata do
- local li = lookupdata[i]
- local tu = target[li]
- if not tu then
- tu = { }
- target[li] = tu
- end
- target = tu
- end
- target.ligature = unicode
-end
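The `ligature` preparer above stores every ligature as a small trie, `hash[first][second]...[last].ligature = replacement`, which is exactly the shape the `t_run` helpers walk. A plain Lua sketch of building such a tree and probing it, with characters as numbers instead of glyph nodes:

local function addligature(hash, components, unicode)
    local target = hash
    for i=1,#components do
        local c  = components[i]
        local tu = target[c]
        if not tu then tu = { } target[c] = tu end
        target = tu
    end
    target.ligature = unicode
end

local function hasligature(hash, chars, start) -- walks the tree the way t_run does
    local t = hash[chars[start]]
    local i = start + 1
    while t do
        local lg = chars[i] and t[chars[i]]
        if lg then
            t = lg
            i = i + 1
        else
            break
        end
    end
    return t and t.ligature or nil
end

local liga = { }
addligature(liga, { 0x66, 0x66, 0x69 }, 0xFB03) -- f f i -> ffi
print(hasligature(liga, { 0x66, 0x66, 0x69, 0x78 }, 1)) -- 64259
print(hasligature(liga, { 0x66, 0x78 }, 1))             -- nil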
-
--- this is not ok .. it doesn't work for the old loader with new loader extension
--- specs
-
-local function pair(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- local others = target[unicode]
- local paired = lookupdata[1]
- if not paired then
- -- something is wrong
- elseif others then
- others[paired] = lookupdata
- else
- others = { [paired] = lookupdata }
- target[unicode] = others
- end
-end
-
-local action = {
- substitution = generic,
- multiple = generic,
- alternate = generic,
- position = generic,
- ligature = ligature,
- pair = pair,
- kern = pair,
-}
-
-local function prepare_lookups(tfmdata)
-
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local anchor_to_lookup = resources.anchor_to_lookup
- local lookup_to_anchor = resources.lookup_to_anchor
- local lookuptypes = resources.lookuptypes
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local duplicates = resources.duplicates
-
-    -- we cannot free the entries in the descriptions as sometimes we access
-    -- them directly (for instance anchors) ... selectively freeing doesn't save
-    -- much memory as it's only a reference to a table and the slot in the
-    -- description hash is not freed anyway
-
- -- we can delay this using metatables so that we don't make the hashes for
- -- features we don't use but then we need to loop over the characters
- -- many times so we gain nothing
-
- for unicode, character in next, characters do -- we cannot loop over descriptions !
-
- local description = descriptions[unicode]
-
- if description then
-
- local lookups = description.slookups
- if lookups then
- for lookupname, lookupdata in next, lookups do
- action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash,duplicates)
- end
- end
-
- local lookups = description.mlookups
- if lookups then
- for lookupname, lookuplist in next, lookups do
- local lookuptype = lookuptypes[lookupname]
- for l=1,#lookuplist do
- local lookupdata = lookuplist[l]
- action[lookuptype](lookupdata,lookupname,unicode,lookuphash,duplicates)
- end
- end
- end
-
- local list = description.kerns
- if list then
- for lookup, krn in next, list do -- ref to glyph, saves lookup
- local target = lookuphash[lookup]
- if target then
- target[unicode] = krn
- else
- lookuphash[lookup] = { [unicode] = krn }
- end
- end
- end
-
- local list = description.anchors
- if list then
- for typ, anchors in next, list do -- types
- if typ == "mark" or typ == "cexit" then -- or entry?
- for name, anchor in next, anchors do
- local lookups = anchor_to_lookup[name]
- if lookups then
- for lookup in next, lookups do
- local target = lookuphash[lookup]
- if target then
- target[unicode] = anchors
- else
- lookuphash[lookup] = { [unicode] = anchors }
- end
- end
- end
- end
- end
- end
- end
-
- end
-
- end
-
-end
-
--- so far
-
-local function split(replacement,original)
- local result = { }
- for i=1,#replacement do
- result[original[i]] = replacement[i]
- end
- return result
-end
-
-local valid = { -- does contextpos work?
- coverage = { chainsub = true, chainpos = true, contextsub = true, contextpos = true },
- reversecoverage = { reversesub = true },
- glyphs = { chainsub = true, chainpos = true, contextsub = true, contextpos = true },
-}
-
-local function prepare_contextchains(tfmdata)
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local lookuptags = resources.lookuptags
- local lookups = rawdata.lookups
- if lookups then
- for lookupname, lookupdata in next, rawdata.lookups do
- local lookuptype = lookupdata.type
- if lookuptype then
- local rules = lookupdata.rules
- if rules then
- local format = lookupdata.format
- local validformat = valid[format]
- if not validformat then
- report_prepare("unsupported format %a",format)
- elseif not validformat[lookuptype] then
- -- todo: dejavu-serif has one (but i need to see what use it has)
- report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname])
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current = rule.current
- local before = rule.before
- local after = rule.after
- local replacements = rule.replacements
- local sequence = { }
- local nofsequences = 0
- -- Eventually we can store start, stop and sequence in the cached file
- -- but then less sharing takes place so best not do that without a lot
- -- of profiling so let's forget about it.
- if before then
- for n=1,#before do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = before[n]
- end
- end
- local start = nofsequences + 1
- for n=1,#current do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = current[n]
- end
- local stop = nofsequences
- if after then
- for n=1,#after do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = after[n]
- end
- end
- if sequence[1] then
- -- Replacements only happen with reverse lookups as they are single only. We
- -- could pack them into current (replacement value instead of true) and then
- -- use sequence[start] instead but it's somewhat ugly.
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
- for unic in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
- end
- else
- -- no rules
- end
- else
- report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname])
- end
- end
- end
-end
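As the comments inside `prepare_contextchains` note, each rule is flattened into one coverage array plus `start`/`stop` indices that mark where the `current` part sits between the `before` and `after` context. A reduced standalone version of that flattening:

local function flattenrule(before, current, after)
    local sequence, n = { }, 0
    if before then
        for i=1,#before do n = n + 1 sequence[n] = before[i] end
    end
    local start = n + 1
    for i=1,#current do n = n + 1 sequence[n] = current[i] end
    local stop = n
    if after then
        for i=1,#after do n = n + 1 sequence[n] = after[i] end
    end
    return sequence, start, stop
end

local sequence, start, stop = flattenrule(
    { { [0x61] = true } },                    -- before  : 'a'
    { { [0x66] = true }, { [0x69] = true } }, -- current : 'f' 'i'
    { { [0x6E] = true } }                     -- after   : 'n'
)
print(#sequence, start, stop) -- 4  2  3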
-
--- we can consider lookuphash == false (initialized but empty) vs lookuphash == table
-
-local function featuresinitializer(tfmdata,value)
- if true then -- value then
- -- beware we need to use the topmost properties table
- local rawdata = tfmdata.shared.rawdata
- local properties = rawdata.properties
- if not properties.initialized then
- local starttime = trace_preparing and os.clock()
- local resources = rawdata.resources
- resources.lookuphash = resources.lookuphash or { }
- prepare_contextchains(tfmdata)
- prepare_lookups(tfmdata)
- properties.initialized = true
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
- end
- end
- end
-end
-
-registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
- position = 1,
- node = featuresinitializer,
- },
- processors = {
- node = featuresprocessor,
- }
-}
-
--- This can be used for extra handlers, but should be used with care!
-
-otf.handlers = handlers
diff --git a/tex/context/base/mkiv/font-oto.lua b/tex/context/base/mkiv/font-oto.lua
index 13568799b..9c42a54bd 100644
--- a/tex/context/base/mkiv/font-oto.lua
+++ b/tex/context/base/mkiv/font-oto.lua
@@ -6,10 +6,6 @@ if not modules then modules = { } end modules ['font-oto'] = { -- original tex
license = "see context related readme files"
}
--- This is a version of font-otb adapted to the new fontloader code. We used to have two
--- base initialization methods but now we have only one. This means that instead of the
--- old default (independent) we now use the one more similar to node mode (shared).
-
local concat, unpack = table.concat, table.unpack
local insert, remove = table.insert, table.remove
local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
diff --git a/tex/context/base/mkiv/font-otp.lua b/tex/context/base/mkiv/font-otp.lua
deleted file mode 100644
index c52e574b9..000000000
--- a/tex/context/base/mkiv/font-otp.lua
+++ /dev/null
@@ -1,894 +0,0 @@
-if not modules then modules = { } end modules ['font-otp'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (packing)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: pack math (but not that much to share)
---
--- pitfall 5.2: hashed tables can suddenly become indexed with nil slots
---
--- unless we sort all hashes we can get a different pack order (no big deal but size can differ)
-
-local next, type, tostring = next, type, tostring
-local sort, concat = table.sort, table.concat
-
-local trace_packing = false trackers.register("otf.packing", function(v) trace_packing = v end)
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-
--- also used in other scripts so we need to check some tables:
-
-fonts = fonts or { }
-
-local handlers = fonts.handlers or { }
-fonts.handlers = handlers
-
-local otf = handlers.otf or { }
-handlers.otf = otf
-
-local glists = otf.glists or { "gsub", "gpos" }
-otf.glists = glists
-
-local criterium = 1
-local threshold = 0
-
-local function tabstr_normal(t)
- local s = { }
- local n = 0
- for k, v in next, t do
- n = n + 1
- if type(v) == "table" then
- s[n] = k .. ">" .. tabstr_normal(v)
- elseif v == true then
- s[n] = k .. "+" -- "=true"
- elseif v then
- s[n] = k .. "=" .. v
- else
- s[n] = k .. "-" -- "=false"
- end
- end
- if n == 0 then
- return ""
- elseif n == 1 then
- return s[1]
- else
- sort(s) -- costly but needed (occasional wrong hit otherwise)
- return concat(s,",")
- end
-end
-
-local function tabstr_flat(t)
- local s = { }
- local n = 0
- for k, v in next, t do
- n = n + 1
- s[n] = k .. "=" .. v
- end
- if n == 0 then
- return ""
- elseif n == 1 then
- return s[1]
- else
- sort(s) -- costly but needed (occasional wrong hit otherwise)
- return concat(s,",")
- end
-end
-
-local function tabstr_mixed(t) -- indexed
- local s = { }
- local n = #t
- if n == 0 then
- return ""
- elseif n == 1 then
- local k = t[1]
- if k == true then
- return "++" -- we need to distinguish from "true"
- elseif k == false then
- return "--" -- we need to distinguish from "false"
- else
- return tostring(k) -- number or string
- end
- else
- for i=1,n do
- local k = t[i]
- if k == true then
- s[i] = "++" -- we need to distinguish from "true"
- elseif k == false then
- s[i] = "--" -- we need to distinguish from "false"
- else
- s[i] = k -- number or string
- end
- end
- return concat(s,",")
- end
-end
-
-local function tabstr_boolean(t)
- local s = { }
- local n = 0
- for k, v in next, t do
- n = n + 1
- if v then
- s[n] = k .. "+"
- else
- s[n] = k .. "-"
- end
- end
- if n == 0 then
- return ""
- elseif n == 1 then
- return s[1]
- else
- sort(s) -- costly but needed (occasional wrong hit otherwise)
- return concat(s,",")
- end
-end
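The `tabstr_*` helpers above serialize a subtable into a canonical string, and the sort is what keeps the tag independent of Lua's hash order (the "occasional wrong hit otherwise"). A reduced sketch of how such tags let identical tables share one slot; the names here are illustrative only:

local sort, concat = table.sort, table.concat

local function tabstr(t) -- canonical tag, independent of hash order
    local s, n = { }, 0
    for k, v in next, t do
        n = n + 1
        s[n] = tostring(k) .. "=" .. tostring(v)
    end
    sort(s)
    return concat(s, ",")
end

local hash, pool, nofpool = { }, { }, 0
local function share(t)
    local tag   = tabstr(t)
    local index = hash[tag]
    if not index then
        nofpool       = nofpool + 1
        pool[nofpool] = t
        hash[tag]     = nofpool
        index         = nofpool
    end
    return index
end

print(share { kern = 10, mark = true }) -- 1
print(share { mark = true, kern = 10 }) -- 1 again : same content, one shared slot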
-
--- tabstr_boolean_x = tabstr_boolean
-
--- tabstr_boolean = function(t)
--- local a = tabstr_normal(t)
--- local b = tabstr_boolean_x(t)
--- print(a)
--- print(b)
--- return b
--- end
-
--- beware: we cannot unpack and repack the same table because then sharing
--- interferes (we could catch this if needed) .. so for now: save, reload
--- and repack in such cases (never needed anyway) .. a tricky aspect is that
--- we then need to sort more thanks to random hashing
-
-function otf.packdata(data)
-
- if data then
- -- stripdata(data)
- local h, t, c = { }, { }, { }
- local hh, tt, cc = { }, { }, { }
- local nt, ntt = 0, 0
- local function pack_normal(v)
- local tag = tabstr_normal(v)
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_flat(v)
- local tag = tabstr_flat(v)
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_boolean(v)
- local tag = tabstr_boolean(v)
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_indexed(v)
- local tag = concat(v," ")
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_mixed(v)
- local tag = tabstr_mixed(v)
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_final(v)
- -- v == number
- if c[v] <= criterium then
- return t[v]
- else
- -- compact hash
- local hv = hh[v]
- if hv then
- return hv
- else
- ntt = ntt + 1
- tt[ntt] = t[v]
- hh[v] = ntt
- cc[ntt] = c[v]
- return ntt
- end
- end
- end
- local function success(stage,pass)
- if nt == 0 then
- if trace_loading or trace_packing then
- report_otf("pack quality: nothing to pack")
- end
- return false
- elseif nt >= threshold then
- local one, two, rest = 0, 0, 0
- if pass == 1 then
- for k,v in next, c do
- if v == 1 then
- one = one + 1
- elseif v == 2 then
- two = two + 1
- else
- rest = rest + 1
- end
- end
- else
- for k,v in next, cc do
- if v > 20 then
- rest = rest + 1
- elseif v > 10 then
- two = two + 1
- else
- one = one + 1
- end
- end
- data.tables = tt
- end
- if trace_loading or trace_packing then
- report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)", stage, pass, one+two+rest, one, two, rest, criterium)
- end
- return true
- else
- if trace_loading or trace_packing then
- report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)", stage, pass, nt, threshold)
- end
- return false
- end
- end
- local function packers(pass)
- if pass == 1 then
- return pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed
- else
- return pack_final, pack_final, pack_final, pack_final, pack_final
- end
- end
- local resources = data.resources
- local lookuptypes = resources.lookuptypes
- for pass=1,2 do
- if trace_packing then
- report_otf("start packing: stage 1, pass %s",pass)
- end
- local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
- for unicode, description in next, data.descriptions do
- local boundingbox = description.boundingbox
- if boundingbox then
- description.boundingbox = pack_indexed(boundingbox)
- end
- local slookups = description.slookups
- if slookups then
- for tag, slookup in next, slookups do
- local what = lookuptypes[tag]
- if what == "pair" then
- local t = slookup[2] if t then slookup[2] = pack_indexed(t) end
- local t = slookup[3] if t then slookup[3] = pack_indexed(t) end
- elseif what ~= "substitution" then
- slookups[tag] = pack_indexed(slookup) -- true is new
- end
- end
- end
- local mlookups = description.mlookups
- if mlookups then
- for tag, mlookup in next, mlookups do
- local what = lookuptypes[tag]
- if what == "pair" then
- for i=1,#mlookup do
- local lookup = mlookup[i]
- local t = lookup[2] if t then lookup[2] = pack_indexed(t) end
- local t = lookup[3] if t then lookup[3] = pack_indexed(t) end
- end
- elseif what ~= "substitution" then
- for i=1,#mlookup do
- mlookup[i] = pack_indexed(mlookup[i]) -- true is new
- end
- end
- end
- end
- local kerns = description.kerns
- if kerns then
- for tag, kern in next, kerns do
- kerns[tag] = pack_flat(kern)
- end
- end
- local math = description.math
- if math then
- local kerns = math.kerns
- if kerns then
- for tag, kern in next, kerns do
- kerns[tag] = pack_normal(kern)
- end
- end
- end
- local anchors = description.anchors
- if anchors then
- for what, anchor in next, anchors do
- if what == "baselig" then
- for _, a in next, anchor do
- for k=1,#a do
- a[k] = pack_indexed(a[k])
- end
- end
- else
- for k, v in next, anchor do
- anchor[k] = pack_indexed(v)
- end
- end
- end
- end
- local altuni = description.altuni
- if altuni then
- for i=1,#altuni do
- altuni[i] = pack_flat(altuni[i])
- end
- end
- end
- local lookups = data.lookups
- if lookups then
- for _, lookup in next, lookups do
- local rules = lookup.rules
- if rules then
- for i=1,#rules do
- local rule = rules[i]
- local r = rule.before if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
- local r = rule.after if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
- local r = rule.current if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
- local r = rule.replacements if r then rule.replacements = pack_flat (r) end -- can have holes
- local r = rule.lookups if r then rule.lookups = pack_indexed(r) end -- can have ""
- -- local r = rule.lookups if r then rule.lookups = pack_flat(r) end -- can have holes (already taken care of some cases)
- end
- end
- end
- end
- local anchor_to_lookup = resources.anchor_to_lookup
- if anchor_to_lookup then
- for anchor, lookup in next, anchor_to_lookup do
- anchor_to_lookup[anchor] = pack_normal(lookup)
- end
- end
- local lookup_to_anchor = resources.lookup_to_anchor
- if lookup_to_anchor then
- for lookup, anchor in next, lookup_to_anchor do
- lookup_to_anchor[lookup] = pack_normal(anchor)
- end
- end
- local sequences = resources.sequences
- if sequences then
- for feature, sequence in next, sequences do
- local flags = sequence.flags
- if flags then
- sequence.flags = pack_normal(flags)
- end
- local subtables = sequence.subtables
- if subtables then
- sequence.subtables = pack_normal(subtables)
- end
- local features = sequence.features
- if features then
- for script, feature in next, features do
- features[script] = pack_normal(feature)
- end
- end
- local order = sequence.order
- if order then
- sequence.order = pack_indexed(order)
- end
- local markclass = sequence.markclass
- if markclass then
- sequence.markclass = pack_boolean(markclass)
- end
- end
- end
- local lookups = resources.lookups
- if lookups then
- for name, lookup in next, lookups do
- local flags = lookup.flags
- if flags then
- lookup.flags = pack_normal(flags)
- end
- local subtables = lookup.subtables
- if subtables then
- lookup.subtables = pack_normal(subtables)
- end
- end
- end
- local features = resources.features
- if features then
- for _, what in next, glists do
- local list = features[what]
- if list then
- for feature, spec in next, list do
- list[feature] = pack_normal(spec)
- end
- end
- end
- end
- if not success(1,pass) then
- return
- end
- end
- if nt > 0 then
- for pass=1,2 do
- if trace_packing then
- report_otf("start packing: stage 2, pass %s",pass)
- end
- local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
- for unicode, description in next, data.descriptions do
- local kerns = description.kerns
- if kerns then
- description.kerns = pack_normal(kerns)
- end
- local math = description.math
- if math then
- local kerns = math.kerns
- if kerns then
- math.kerns = pack_normal(kerns)
- end
- end
- local anchors = description.anchors
- if anchors then
- description.anchors = pack_normal(anchors)
- end
- local mlookups = description.mlookups
- if mlookups then
- for tag, mlookup in next, mlookups do
- mlookups[tag] = pack_normal(mlookup)
- end
- end
- local altuni = description.altuni
- if altuni then
- description.altuni = pack_normal(altuni)
- end
- end
- local lookups = data.lookups
- if lookups then
- for _, lookup in next, lookups do
- local rules = lookup.rules
- if rules then
- for i=1,#rules do -- was next loop
- local rule = rules[i]
- local r = rule.before if r then rule.before = pack_normal(r) end
- local r = rule.after if r then rule.after = pack_normal(r) end
- local r = rule.current if r then rule.current = pack_normal(r) end
- end
- end
- end
- end
- local sequences = resources.sequences
- if sequences then
- for feature, sequence in next, sequences do
- sequence.features = pack_normal(sequence.features)
- end
- end
- if not success(2,pass) then
- -- return
- end
- end
-
- for pass=1,2 do
- local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
- for unicode, description in next, data.descriptions do
- local slookups = description.slookups
- if slookups then
- description.slookups = pack_normal(slookups)
- end
- local mlookups = description.mlookups
- if mlookups then
- description.mlookups = pack_normal(mlookups)
- end
- end
- end
-
- end
- end
-end
-
-local unpacked_mt = {
- __index =
- function(t,k)
- t[k] = false
- return k -- next time true
- end
-}
-
-function otf.unpackdata(data)
-
- if data then
- local tables = data.tables
- if tables then
- local resources = data.resources
- local lookuptypes = resources.lookuptypes
- local unpacked = { }
- setmetatable(unpacked,unpacked_mt)
- for unicode, description in next, data.descriptions do
- local tv = tables[description.boundingbox]
- if tv then
- description.boundingbox = tv
- end
- local slookups = description.slookups
- if slookups then
- local tv = tables[slookups]
- if tv then
- description.slookups = tv
- slookups = unpacked[tv]
- end
- if slookups then
- for tag, lookup in next, slookups do
- local what = lookuptypes[tag]
- if what == "pair" then
- local tv = tables[lookup[2]]
- if tv then
- lookup[2] = tv
- end
- local tv = tables[lookup[3]]
- if tv then
- lookup[3] = tv
- end
- elseif what ~= "substitution" then
- local tv = tables[lookup]
- if tv then
- slookups[tag] = tv
- end
- end
- end
- end
- end
- local mlookups = description.mlookups
- if mlookups then
- local tv = tables[mlookups]
- if tv then
- description.mlookups = tv
- mlookups = unpacked[tv]
- end
- if mlookups then
- for tag, list in next, mlookups do
- local tv = tables[list]
- if tv then
- mlookups[tag] = tv
- list = unpacked[tv]
- end
- if list then
- local what = lookuptypes[tag]
- if what == "pair" then
- for i=1,#list do
- local lookup = list[i]
- local tv = tables[lookup[2]]
- if tv then
- lookup[2] = tv
- end
- local tv = tables[lookup[3]]
- if tv then
- lookup[3] = tv
- end
- end
- elseif what ~= "substitution" then
- for i=1,#list do
- local tv = tables[list[i]]
- if tv then
- list[i] = tv
- end
- end
- end
- end
- end
- end
- end
- local kerns = description.kerns
- if kerns then
- local tm = tables[kerns]
- if tm then
- description.kerns = tm
- kerns = unpacked[tm]
- end
- if kerns then
- for k, kern in next, kerns do
- local tv = tables[kern]
- if tv then
- kerns[k] = tv
- end
- end
- end
- end
- local math = description.math
- if math then
- local kerns = math.kerns
- if kerns then
- local tm = tables[kerns]
- if tm then
- math.kerns = tm
- kerns = unpacked[tm]
- end
- if kerns then
- for k, kern in next, kerns do
- local tv = tables[kern]
- if tv then
- kerns[k] = tv
- end
- end
- end
- end
- end
- local anchors = description.anchors
- if anchors then
- local ta = tables[anchors]
- if ta then
- description.anchors = ta
- anchors = unpacked[ta]
- end
- if anchors then
- for tag, anchor in next, anchors do
- if tag == "baselig" then
- for _, list in next, anchor do
- for i=1,#list do
- local tv = tables[list[i]]
- if tv then
- list[i] = tv
- end
- end
- end
- else
- for a, data in next, anchor do
- local tv = tables[data]
- if tv then
- anchor[a] = tv
- end
- end
- end
- end
- end
- end
- local altuni = description.altuni
- if altuni then
- local altuni = tables[altuni]
- if altuni then
- description.altuni = altuni
- for i=1,#altuni do
- local tv = tables[altuni[i]]
- if tv then
- altuni[i] = tv
- end
- end
- end
- end
- end
- local lookups = data.lookups
- if lookups then
- for _, lookup in next, lookups do
- local rules = lookup.rules
- if rules then
- for i=1,#rules do -- was next loop
- local rule = rules[i]
- local before = rule.before
- if before then
- local tv = tables[before]
- if tv then
- rule.before = tv
- before = unpacked[tv]
- end
- if before then
- for i=1,#before do
- local tv = tables[before[i]]
- if tv then
- before[i] = tv
- end
- end
- end
- end
- local after = rule.after
- if after then
- local tv = tables[after]
- if tv then
- rule.after = tv
- after = unpacked[tv]
- end
- if after then
- for i=1,#after do
- local tv = tables[after[i]]
- if tv then
- after[i] = tv
- end
- end
- end
- end
- local current = rule.current
- if current then
- local tv = tables[current]
- if tv then
- rule.current = tv
- current = unpacked[tv]
- end
- if current then
- for i=1,#current do
- local tv = tables[current[i]]
- if tv then
- current[i] = tv
- end
- end
- end
- end
- local replacements = rule.replacements
- if replacements then
- local tv = tables[replacements]
- if tv then
- rule.replacements = tv
- end
- end
- -- local fore = rule.fore
- -- if fore then
- -- local tv = tables[fore]
- -- if tv then
- -- rule.fore = tv
- -- end
- -- end
- -- local back = rule.back
- -- if back then
- -- local tv = tables[back]
- -- if tv then
- -- rule.back = tv
- -- end
- -- end
- -- local names = rule.names
- -- if names then
- -- local tv = tables[names]
- -- if tv then
- -- rule.names = tv
- -- end
- -- end
- --
- local lookups = rule.lookups
- if lookups then
- local tv = tables[lookups]
- if tv then
- rule.lookups = tv
- end
- end
- end
- end
- end
- end
- local anchor_to_lookup = resources.anchor_to_lookup
- if anchor_to_lookup then
- for anchor, lookup in next, anchor_to_lookup do
- local tv = tables[lookup]
- if tv then
- anchor_to_lookup[anchor] = tv
- end
- end
- end
- local lookup_to_anchor = resources.lookup_to_anchor
- if lookup_to_anchor then
- for lookup, anchor in next, lookup_to_anchor do
- local tv = tables[anchor]
- if tv then
- lookup_to_anchor[lookup] = tv
- end
- end
- end
- local ls = resources.sequences
- if ls then
- for _, feature in next, ls do
- local flags = feature.flags
- if flags then
- local tv = tables[flags]
- if tv then
- feature.flags = tv
- end
- end
- local subtables = feature.subtables
- if subtables then
- local tv = tables[subtables]
- if tv then
- feature.subtables = tv
- end
- end
- local features = feature.features
- if features then
- local tv = tables[features]
- if tv then
- feature.features = tv
- features = unpacked[tv]
- end
- if features then
- for script, data in next, features do
- local tv = tables[data]
- if tv then
- features[script] = tv
- end
- end
- end
- end
- local order = feature.order
- if order then
- local tv = tables[order]
- if tv then
- feature.order = tv
- end
- end
- local markclass = feature.markclass
- if markclass then
- local tv = tables[markclass]
- if tv then
- feature.markclass = tv
- end
- end
- end
- end
- local lookups = resources.lookups
- if lookups then
- for _, lookup in next, lookups do
- local flags = lookup.flags
- if flags then
- local tv = tables[flags]
- if tv then
- lookup.flags = tv
- end
- end
- local subtables = lookup.subtables
- if subtables then
- local tv = tables[subtables]
- if tv then
- lookup.subtables = tv
- end
- end
- end
- end
- local features = resources.features
- if features then
- for _, what in next, glists do
- local feature = features[what]
- if feature then
- for tag, spec in next, feature do
- local tv = tables[spec]
- if tv then
- feature[tag] = tv
- end
- end
- end
- end
- end
- data.tables = nil
- end
- end
-end
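A much reduced model of what `packdata`/`unpackdata` above do: identical leaf tables (bounding boxes in this sketch) are replaced by an index into a shared pool stored once as `data.tables`, and unpacking puts the real tables back. Field names only loosely mirror the real data:

local function packdata(data)
    local h, t, nt = { }, { }, 0
    for unicode, description in next, data.descriptions do
        local boundingbox = description.boundingbox
        local tag   = table.concat(boundingbox, " ")
        local index = h[tag]
        if not index then
            nt     = nt + 1
            t[nt]  = boundingbox
            h[tag] = nt
            index  = nt
        end
        description.boundingbox = index
    end
    data.tables = t
end

local function unpackdata(data)
    local tables = data.tables
    if not tables then return end
    for unicode, description in next, data.descriptions do
        local tv = tables[description.boundingbox]
        if tv then description.boundingbox = tv end
    end
    data.tables = nil
end

local data = { descriptions = {
    [0x66] = { boundingbox = { 0, 0, 500, 700 } },
    [0x69] = { boundingbox = { 0, 0, 500, 700 } },
} }
packdata(data)
print(data.descriptions[0x66].boundingbox)    -- 1 : an index into data.tables
unpackdata(data)
print(data.descriptions[0x66].boundingbox[3]) -- 500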
diff --git a/tex/context/base/mkiv/font-ott.lua b/tex/context/base/mkiv/font-ott.lua
index cba3758dc..9f9900dc2 100644
--- a/tex/context/base/mkiv/font-ott.lua
+++ b/tex/context/base/mkiv/font-ott.lua
@@ -1,6 +1,6 @@
if not modules then modules = { } end modules ['font-ott'] = {
version = 1.001,
- comment = "companion to font-otf.lua (tables)",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files",
diff --git a/tex/context/base/mkiv/m-oldotf.mkiv b/tex/context/base/mkiv/m-oldotf.mkiv
deleted file mode 100644
index f860df712..000000000
--- a/tex/context/base/mkiv/m-oldotf.mkiv
+++ /dev/null
@@ -1,77 +0,0 @@
-%D \module
-%D [ file=m-oldotf,
-%D version=2015.07.08,
-%D title=\CONTEXT\ Extra Modules,
-%D subtitle=Traditional OTF Loader,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\unprotect
-
-\startluacode
- local files = {
- "font-otf",
- "font-otb",
- "font-inj",
--- "font-ota",
- "font-otn",
- "font-otd",
- "font-otp",
- "font-otc",
- "font-oth",
- "font-odv",
- "font-one",
- "font-map",
- "font-fbk",
- }
- local report = logs.reporter("oldotf")
- local findfile = resolvers.findfile
- local addsuffix = file.addsuffix
- report()
- report("using traditional font loader code")
- report()
- for i=1,#files do
- local foundfile = findfile(addsuffix(files[i],"lua"))
- if foundfile and foundfile ~= "" then
- report("loading %a",foundfile)
- dofile(foundfile)
- end
- end
- report()
-
- -- needed for testing:
-
- local nuts = nodes.nuts
- local copy_node = nuts.copy
- local kern = nuts.pool.register(nuts.pool.kern())
- local setfield = nuts.setfield
-
- nuts.setattr(kern,attributes.private('fontkern'),1) -- we can have several, attributes are shared
-
- nodes.injections.installnewkern(function(k)
- local c = copy_node(kern)
- setfield(c,"kern",k)
- return c
- end)
-
- directives.register("fonts.injections.fontkern", function(v) setfield(kern,"subtype",v and 0 or 1) end)
-
- local fonts = fonts
- local handlers = fonts.handlers
- local otf = handlers.otf -- brrr
- local afm = handlers.afm -- brrr
- local getters = fonts.getters
-
- getters.kern .opentype = otf.getkern
- getters.substitution.opentype = otf.getsubstitution
- getters.alternate .opentype = otf.getalternate
- getters.multiple .opentype = otf.getmultiple
-
-\stopluacode
-
-\protect \endinput
diff --git a/tex/context/base/mkiv/math-fbk.lua b/tex/context/base/mkiv/math-fbk.lua
index 7621b6525..5a6a42e26 100644
--- a/tex/context/base/mkiv/math-fbk.lua
+++ b/tex/context/base/mkiv/math-fbk.lua
@@ -35,111 +35,117 @@ local lastmathids = fonts.hashes.lastmathids
-- in context define three sizes but pass them later i.e. do virtualize afterwards
function fallbacks.apply(target,original)
- local mathparameters = target.mathparameters -- why not hasmath
- if mathparameters then
- local characters = target.characters
- local parameters = target.parameters
- local mathsize = parameters.mathsize
- local size = parameters.size
- local usedfonts = target.fonts
- if not usedfonts then
- usedfonts = { }
- target.fonts = usedfonts
- end
- -- This is not okay yet ... we have no proper way to refer to 'self'
-        -- (otherwise I will make my own id allocator).
- local self = #usedfonts == 0 and font.nextid() or nil -- will be true
- local textid, scriptid, scriptscriptid
- local textindex, scriptindex, scriptscriptindex
- local textdata, scriptdata, scriptscriptdata
- if mathsize == 3 then
- -- scriptscriptsize
- -- textid = nil -- self
- -- scriptid = nil -- no smaller
- -- scriptscriptid = nil -- no smaller
- textid = self
- scriptid = self
- scriptscriptid = self
- elseif mathsize == 2 then
- -- scriptsize
- -- textid = nil -- self
- textid = self
- scriptid = lastmathids[3]
- scriptscriptid = lastmathids[3]
- else
- -- textsize
- -- textid = nil -- self
- textid = self
- scriptid = lastmathids[2]
- scriptscriptid = lastmathids[3]
- end
- if textid then
- textindex = #usedfonts + 1
- usedfonts[textindex] = { id = textid }
--- textdata = identifiers[textid] or target
- textdata = target
- else
- textdata = target
- end
- if scriptid then
- scriptindex = #usedfonts + 1
- usedfonts[scriptindex] = { id = scriptid }
- scriptdata = identifiers[scriptid]
- else
- scriptindex = textindex
- scriptdata = textdata
- end
- if scriptscriptid then
- scriptscriptindex = #usedfonts + 1
- usedfonts[scriptscriptindex] = { id = scriptscriptid }
- scriptscriptdata = identifiers[scriptscriptid]
- else
- scriptscriptindex = scriptindex
- scriptscriptdata = scriptdata
- end
- -- report_fallbacks("used textid: %S, used script id: %S, used scriptscript id: %S",textid,scriptid,scriptscriptid)
- local data = {
- textdata = textdata,
- scriptdata = scriptdata,
- scriptscriptdata = scriptscriptdata,
- textindex = textindex,
- scriptindex = scriptindex,
- scriptscriptindex = scriptscriptindex,
- textid = textid,
- scriptid = scriptid,
- scriptscriptid = scriptscriptid,
- characters = characters,
- unicode = k,
- target = target,
- original = original,
- size = size,
- mathsize = mathsize,
- }
- target.mathrelation = data
- -- inspect(usedfonts)
- for k, v in next, virtualcharacters do
- if not characters[k] then
- local tv = type(v)
- local cd = nil
- if tv == "table" then
- cd = v
- elseif tv == "number" then
- cd = characters[v]
- elseif tv == "function" then
- cd = v(data)
- end
- if cd then
- characters[k] = cd
- else
- -- something else
- end
- if trace_fallbacks and characters[k] then
- report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
- end
+ local mathparameters = target.mathparameters
+ if not mathparameters then
+ return
+ end
+ -- we also have forcedsize ... at this moment we already passed through
+ -- constructors.scale so we have this set
+ local parameters = target.parameters
+ local mathsize = parameters.mathsize
+ if mathsize < 1 or mathsize > 3 then
+ return
+ end
+ local characters = target.characters
+ local size = parameters.size
+ local usedfonts = target.fonts
+ if not usedfonts then
+ usedfonts = { }
+ target.fonts = usedfonts
+ end
+ -- This is not okay yet ... we have no proper way to refer to 'self'
+    -- (otherwise I will make my own id allocator).
+ local self = #usedfonts == 0 and font.nextid() or nil -- will be true
+ local textid, scriptid, scriptscriptid
+ local textindex, scriptindex, scriptscriptindex
+ local textdata, scriptdata, scriptscriptdata
+ if mathsize == 3 then
+ -- scriptscriptsize
+ -- textid = nil -- self
+ -- scriptid = nil -- no smaller
+ -- scriptscriptid = nil -- no smaller
+ textid = self
+ scriptid = self
+ scriptscriptid = self
+ elseif mathsize == 2 then
+ -- scriptsize
+ -- textid = nil -- self
+ textid = self
+ scriptid = lastmathids[3]
+ scriptscriptid = lastmathids[3]
+ else
+ -- textsize
+ -- textid = nil -- self
+ textid = self
+ scriptid = lastmathids[2]
+ scriptscriptid = lastmathids[3]
+ end
+ if textid then
+ textindex = #usedfonts + 1
+ usedfonts[textindex] = { id = textid }
+ -- textdata = identifiers[textid] or target
+ textdata = target
+ else
+ textdata = target
+ end
+ if scriptid then
+ scriptindex = #usedfonts + 1
+ usedfonts[scriptindex] = { id = scriptid }
+ scriptdata = identifiers[scriptid]
+ else
+ scriptindex = textindex
+ scriptdata = textdata
+ end
+ if scriptscriptid then
+ scriptscriptindex = #usedfonts + 1
+ usedfonts[scriptscriptindex] = { id = scriptscriptid }
+ scriptscriptdata = identifiers[scriptscriptid]
+ else
+ scriptscriptindex = scriptindex
+ scriptscriptdata = scriptdata
+ end
+ -- report_fallbacks("used textid: %S, used script id: %S, used scriptscript id: %S",textid,scriptid,scriptscriptid)
+ local data = {
+ textdata = textdata,
+ scriptdata = scriptdata,
+ scriptscriptdata = scriptscriptdata,
+ textindex = textindex,
+ scriptindex = scriptindex,
+ scriptscriptindex = scriptscriptindex,
+ textid = textid,
+ scriptid = scriptid,
+ scriptscriptid = scriptscriptid,
+ characters = characters,
+ unicode = k,
+ target = target,
+ original = original,
+ size = size,
+ mathsize = mathsize,
+ }
+ target.mathrelation = data
+ -- inspect(usedfonts)
+ for k, v in next, virtualcharacters do
+ if not characters[k] then
+ local tv = type(v)
+ local cd = nil
+ if tv == "table" then
+ cd = v
+ elseif tv == "number" then
+ cd = characters[v]
+ elseif tv == "function" then
+ cd = v(data)
+ end
+ if cd then
+ characters[k] = cd
+ else
+ -- something else
+ end
+ if trace_fallbacks and characters[k] then
+ report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
end
end
- data.unicode = nil
end
+ data.unicode = nil
end
utilities.sequencers.appendaction("aftercopyingcharacters","system","mathematics.fallbacks.apply")
diff --git a/tex/context/base/mkiv/metatex.lus b/tex/context/base/mkiv/metatex.lus
deleted file mode 100644
index df7bc1914..000000000
--- a/tex/context/base/mkiv/metatex.lus
+++ /dev/null
@@ -1,9 +0,0 @@
-if not modules then modules = { } end modules ['metatex'] = {
- version = 1.001,
- comment = "companion to metatex.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return "luat-cod.lua"
diff --git a/tex/context/base/mkiv/metatex.tex b/tex/context/base/mkiv/metatex.tex
index b5f54c4ee..7c8a7ff01 100644
--- a/tex/context/base/mkiv/metatex.tex
+++ b/tex/context/base/mkiv/metatex.tex
@@ -22,144 +22,9 @@
 %D A format is generated with the command:
%D
%D \starttyping
-%D luatools --make --compile metatex
+%D mtxrun --script metatex --make
%D \stoptyping
%D
-%D Remark: this is far from complete. We will gradually add
-%D more. Also, it's not yet clear what exactly will be part
-%D of it. This is a prelude to a configurable macro package.
-
-\catcode`\{=1 \catcode`\}=2 \catcode`\#=6
-
-\edef\metatexformat {\jobname}
-\edef\metatexversion{2007.04.03 13:01}
-
-\let\fmtname \metatexformat
-\let\fmtversion\metatexversion
-
-\ifx\normalinput\undefined \let\normalinput\input \fi
-
-\def\loadcorefile#1{\normalinput#1.tex \relax}
-\def\loadmarkfile#1{\normalinput#1.mkiv\relax}
-
-\loadmarkfile{syst-ini}
-
-\ifnum\luatexversion<60 % also change message
- \writestatus{!!!!}{Your luatex binary is too old, you need at least version 0.60!}
- \expandafter\end
-\fi
-
-\newtoks\metatexversiontoks \metatexversiontoks\expandafter{\metatexversion} % at the lua end
-
-\loadmarkfile{syst-pln} % plain tex initializations of internal registers (no further code)
-\loadmarkfile{syst-mes}
-
-\loadmarkfile{luat-cod} %
-\loadmarkfile{luat-bas} %
-\loadmarkfile{luat-lib} %
-
-% needs stripping:
-
-\loadmarkfile{catc-ini} % catcode table management
-\loadmarkfile{catc-act} % active character definition mechanisms
-\loadmarkfile{catc-def} % some generic catcode tables
-\loadmarkfile{catc-ctx} % a couple of context specific tables but expected by later modules
-\loadmarkfile{catc-sym} % some definitions related to \letter<tokens>
-
-% helpers, maybe less
-
-\loadmarkfile{syst-aux} % a whole lot of auxiliary macros
-%loadmarkfile{syst-lua} % some helpers using lua instead
-%loadmarkfile{syst-con} % some rather basic conversions
-%loadmarkfile{syst-fnt}
-%loadmarkfile{syst-str}
-%loadmarkfile{syst-rtp}
-
-% not needed
-
-% \loadmarkfile{supp-fil}
-% \loadmarkfile{supp-dir}
-
-% characters
-
-\loadmarkfile{char-utf}
-\loadmarkfile{char-ini}
-\loadmarkfile{char-enc} % \registerctxluafile{char-enc}{1.001}
-
-% attributes
-
-\loadmarkfile{attr-ini}
-
-% nodes
-
-\loadmarkfile{node-ini}
-%loadmarkfile{node-fin}
-%loadmarkfile{node-par}
-
-% attributes, not needed:
-
-%loadmarkfile{attr-ini}
-
-% regimes
-
-% \loadmarkfile{regi-ini}
-% \loadcorefile{regi-syn}
-
-% languages
-
-% fonts
-
-% \loadcorefile{enco-ini.mkiv}
-% \loadcorefile{hand-ini.mkiv}
-
-\registerctxluafile{font-ini}{1.001}
-
-\registerctxluafile{node-fnt}{1.001}
-
-\registerctxluafile{font-enc}{1.001}
-\registerctxluafile{font-map}{1.001}
-\registerctxluafile{font-syn}{1.001}
-\registerctxluafile{font-tfm}{1.001}
-\registerctxluafile{font-afm}{1.001}
-\registerctxluafile{font-cid}{1.001}
-\registerctxluafile{font-ott}{1.001}
-\registerctxluafile{font-otf}{1.001}
-\registerctxluafile{font-otb}{1.001}
-\registerctxluafile{font-otn}{1.001}
-\registerctxluafile{font-ota}{1.001}
-\registerctxluafile{font-otp}{1.001}
-\registerctxluafile{font-otc}{1.001}
-%registerctxluafile{font-vf} {1.001}
-\registerctxluafile{font-def}{1.001}
-%registerctxluafile{font-ctx}{1.001}
-\registerctxluafile{font-xtx}{1.001}
-%registerctxluafile{font-fbk}{1.001}
-%registerctxluafile{font-ext}{1.001}
-\registerctxluafile{font-pat}{1.001}
-%registerctxluafile{font-chk}{1.001}
-
-%registerctxluafile{math-ini}{1.001}
-%registerctxluafile{math-dim}{1.001}
-%registerctxluafile{math-ent}{1.001}
-%registerctxluafile{math-ext}{1.001}
-%registerctxluafile{math-vfu}{1.001}
-%registerctxluafile{math-map}{1.001}
-%registerctxluafile{math-noa}{1.001}
-
-\registerctxluafile{task-ini}{1.001}
-
-%registerctxluafile{l-xml}{1.001} % needed for font database
-
-% why not ...
-
-\pdfoutput\plusone
-
-% too
-
-\appendtoks
- \ctxlua{statistics.savefmtstatus("\jobname","\metatexversion","metatex.tex")}% can become automatic
-\to \everydump
-
-% done
-
-\errorstopmode \dump \endinput
+%D For the moment this is a placeholder. Maybe some day ... the old
+%D file is in history/metatex/metatex.tex so I can pick up from there
+%D if needed.
diff --git a/tex/context/base/mkiv/node-nut.lua b/tex/context/base/mkiv/node-nut.lua
index cbaeb8977..b42727e06 100644
--- a/tex/context/base/mkiv/node-nut.lua
+++ b/tex/context/base/mkiv/node-nut.lua
@@ -782,7 +782,7 @@ nodes.properties = {
}
------.set_properties_mode(true,false) -- shallow copy ... problem: in fonts we then affect the originals too
-direct.set_properties_mode(true,true) -- create metatable, slower but needed for font-inj.lua (unless we use an intermediate table)
+direct.set_properties_mode(true,true) -- create metatable, slower but needed for font-otj.lua (unless we use an intermediate table)
-- todo:
--
diff --git a/tex/context/base/mkiv/status-files.pdf b/tex/context/base/mkiv/status-files.pdf
index a38925207..7e122efb5 100644
--- a/tex/context/base/mkiv/status-files.pdf
+++ b/tex/context/base/mkiv/status-files.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/status-lua.pdf b/tex/context/base/mkiv/status-lua.pdf
index 8e140db0c..f10b35e86 100644
--- a/tex/context/base/mkiv/status-lua.pdf
+++ b/tex/context/base/mkiv/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/status-mkiv.lua b/tex/context/base/mkiv/status-mkiv.lua
deleted file mode 100644
index ab1419c98..000000000
--- a/tex/context/base/mkiv/status-mkiv.lua
+++ /dev/null
@@ -1,7441 +0,0 @@
-return {
- ["core"]={
- {
- ["category"]="mkvi",
- ["filename"]="font-gds",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-run",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-sel",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="grph-pat",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="grph-rul",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-rep",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="luat-usr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="math-mis",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="math-rad",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-cst",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="page-inj",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="page-lin",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-author",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-cite",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-commands",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-default",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-definitions",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-list",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-page",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="spac-flr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="spac-prf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-not",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="toks-map",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="toks-tra",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-chr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-inj",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-lig",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-lin",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-par",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-wrp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="syst-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="norm-ctx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="syst-pln",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="syst-mes",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="luat-cod",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (3)",
- ["filename"]="luat-bas",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (3)",
- ["filename"]="luat-lib",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="catc-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="forward dependency",
- ["filename"]="catc-act",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="catc-def",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="catc-ctx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="catc-sym",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="only needed for mkii xml parser",
- ["filename"]="catc-xml",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (1)",
- ["filename"]="cldf-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="syst-aux",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (1)",
- ["filename"]="syst-lua",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (1)",
- ["filename"]="syst-con",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (1)",
- ["filename"]="syst-fnt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (1)",
- ["filename"]="syst-rtp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="maybe combine (2)",
- ["filename"]="file-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="maybe combine (2)",
- ["filename"]="file-res",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="file-lib",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="supp-dir",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="char-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="char-utf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="forward dependency",
- ["filename"]="char-act",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mult-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mult-sys",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mult-aux",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mult-def",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mult-chk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="mult-dim",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cldf-int",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="luat-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="toks-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="attr-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="attr-mkr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="code might move from here",
- ["filename"]="core-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="might need more redoing",
- ["filename"]="core-env",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="in due time more might move to here",
- ["filename"]="layo-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe this becomes a runtime module",
- ["filename"]="node-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe use context.generics/context.sprint here",
- ["filename"]="cldf-bas",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="might need more redoing",
- ["filename"]="node-fin",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="needs integration and configuration",
- ["filename"]="node-mig",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-bld",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-sus",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="node-pag",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="back-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="attr-col",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="attr-lay",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="attr-neg",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="attr-eff",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="needs more usage",
- ["filename"]="trac-tex",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="trac-deb",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="trac-ctx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="supp-box",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="supp-ran",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be moved to the math-* modules",
- ["filename"]="supp-mat",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will grow",
- ["filename"]="typo-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="file-syn",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="file-mod",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="core-con",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-fil",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-nop",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-yes",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="regi-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="enco-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="hand-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="namespace should be languages",
- ["filename"]="lang-lab",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-hyp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="unic-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="core-uti",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe rename to core-two",
- ["filename"]="core-two",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="core-dat",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-ext",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-grp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="node-bck",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-cut",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-mis",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-url",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-def",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-hyp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-frq",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-frd",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-wrd",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="might need more redoing",
- ["filename"]="file-job",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="sort-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="pack-mis",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-rul",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="endpar experimental code",
- ["filename"]="pack-mrl",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="pack-bck",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-fen",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lxml-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lxml-sor",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="typo-prc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-tag",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this module might go away when code has been moved",
- ["filename"]="strc-doc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="still some rough edges",
- ["filename"]="strc-num",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-mar",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-sbe",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-lst",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="some of the local current and synchronization macros will be renamed",
- ["filename"]="strc-sec",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-pag",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="(support for) setups might get improved",
- ["filename"]="strc-ren",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this module might go away",
- ["filename"]="strc-xml",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-def",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="some more low level names might change",
- ["filename"]="strc-ref",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="use setups for rendering",
- ["filename"]="strc-reg",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-lev",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe some tuning is needed / will happen",
- ["filename"]="spac-ali",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="probably needs some more work",
- ["filename"]="spac-hor",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe some changes will happen",
- ["filename"]="spac-ver",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="could be improved if needed",
- ["filename"]="spac-lin",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this needs to be checked occasionally",
- ["filename"]="spac-pag",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="spac-par",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="spac-def",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="needs thinking and redoing",
- ["filename"]="spac-grd",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="in transition",
- ["filename"]="anch-pos",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="maybe change locationattribute names",
- ["filename"]="scrn-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="scrn-ref",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will change when we have objects at lua end",
- ["filename"]="pack-obj",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-itm",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="maybe more common counter code here and setups need to be improved",
- ["filename"]="strc-con",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-des",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="(interactive) coupling is not yet working",
- ["filename"]="strc-enu",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-ind",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="needs to be adapted when strc-con/des/enu changes",
- ["filename"]="strc-lab",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-syn",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="a funny mix",
- ["filename"]="core-sys",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-var",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="page-otr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="code might end up elsewhere",
- ["filename"]="page-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="dealing with insertions might change",
- ["filename"]="page-ins",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-fac",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="otr commands will be redone",
- ["filename"]="page-brk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="helpers for columns",
- ["filename"]="page-col",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="room for improvement and extension",
- ["filename"]="page-inf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-grd",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be extended when columns are redone",
- ["filename"]="page-flt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-bck",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-not",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="can probably be improved",
- ["filename"]="page-one",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-lay",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="page-box",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="a few things left",
- ["filename"]="page-txt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-sid",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="in due time we need a further cleanup",
- ["filename"]="strc-flt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-pst",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="might be extended",
- ["filename"]="page-mbk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be reimplemented",
- ["filename"]="page-mul",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="work in progress",
- ["filename"]="page-mix",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be reimplemented",
- ["filename"]="page-set",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-lyr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-pos",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="page-mak",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="might get extended",
- ["filename"]="page-par",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-pag",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-mar",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-itm",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="check other modules for buffer usage",
- ["filename"]="buff-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="check obsolete processbuffer",
- ["filename"]="buff-ver",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="experimental code",
- ["filename"]="buff-par",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-cc",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-default",
- ["loading"]="indirect",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-escaped",
- ["loading"]="indirect",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-nested",
- ["loading"]="indirect",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-blk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-imp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="maybe some extensions and delayed loading, needs checking",
- ["filename"]="page-sel",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-com",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="scrn-pag",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="functionality needs checking",
- ["filename"]="scrn-wid",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="scrn-but",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="scrn-bar",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-bkm",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-tal",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="somewhat weird",
- ["filename"]="tabl-com",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="unchecked",
- ["filename"]="tabl-pln",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="tabl-tab",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["comment"]="can probably be improved (names and such)",
- ["filename"]="tabl-tbl",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
-   ["comment"]="frozen functionality so no drastic cleanup",
- ["filename"]="tabl-ntb",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="tabl-mis",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="tabl-nte",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be redone when needed",
- ["filename"]="tabl-ltb",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be adapted when needed (and rest is done)",
- ["filename"]="tabl-tsp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="tabl-xtb",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="only when natural tables need a replacement",
- ["filename"]="tabl-xnt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="java-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="scrn-fld",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="scrn-hlp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="char-enc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-lib",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-fil",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-var",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-fea",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-mat",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="needs occasional checking and upgrading",
- ["filename"]="font-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-sym",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-sty",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-emp",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-col",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="font-pre",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="font-unk",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="likely this will become a module",
- ["filename"]="font-tra",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this could become a module",
- ["filename"]="font-chk",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this one might be merged",
- ["filename"]="font-uni",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-col",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-aux",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-lan",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this is work in progress",
- ["filename"]="lxml-css",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="spac-chr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="work in progress",
- ["filename"]="blob-ini",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="trac-jus",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="trac-vis",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-cln",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-spa",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="do we keep the style and color or not",
- ["filename"]="typo-krn",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="typo-itc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe singular setup",
- ["filename"]="typo-dir",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-brk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-cap",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-dig",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-rep",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="maybe there will be a nicer interface",
- ["filename"]="typo-txt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-drp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-fln",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="type-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-set",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-def",
- ["loading"]="type-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-fbk",
- ["loading"]="type-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-lua",
- ["loading"]="type-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-one",
- ["loading"]="type-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-otf",
- ["loading"]="type-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-siz",
- ["loading"]="type-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="placeholder to prevent other loading",
- ["filename"]="type-tmf",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="scrp-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this module is obsolete",
- ["filename"]="prop-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mlib-ctx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="metapost code is always evolving",
- ["filename"]="meta-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="code used in a project",
- ["filename"]="meta-lua",
- ["loading"]="experimental",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-fnt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-tex",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe this one will be merged",
- ["filename"]="meta-fun",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="might get updated when mp code gets cleaned up",
- ["filename"]="meta-pag",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-grd",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-mrk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-flw",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-spr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be made better",
- ["filename"]="page-plg",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="needs more work (and thinking)",
- ["filename"]="page-str",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="in transition",
- ["filename"]="anch-pgr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="in transition",
- ["filename"]="anch-bck",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will stay experimental for a while",
- ["filename"]="anch-tab",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="anch-bar",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="interesting old mechanism to keep around (module?)",
- ["filename"]="anch-snc",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="math-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this file might merge into others",
- ["filename"]="math-pln",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="math-for",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="eventually this will be split and spread",
- ["filename"]="math-def",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be checked and improved",
- ["filename"]="math-ali",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="needs testing",
- ["filename"]="math-arr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="math-stc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="math-acc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="at least for the moment",
- ["filename"]="math-frc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="math-scr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="math-int",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
-   ["comment"]="code gets replaced (by autodelimiters)",
- ["filename"]="math-del",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="math-fen",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="code might move to here",
- ["filename"]="math-inl",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="code might move to here",
- ["filename"]="math-dis",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="phys-dim",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="some more functionality will end up here",
- ["filename"]="strc-mat",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="chem-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="chem-str",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-scr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe some cleanup is needed",
- ["filename"]="node-rul",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="needs testing",
- ["filename"]="font-sol",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-not",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkvi",
- ["comment"]="will be extended as part of crited",
- ["filename"]="strc-lnt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-com",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-del",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="grph-trf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="grph-inc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="grph-fig",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="grph-raw",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-box",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-bar",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-app",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-fig",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="more or less obsolete",
- ["filename"]="lang-spa",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="bibl-bib",
- ["loading"]="on demand",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="bibl-tra",
- ["loading"]="on demand",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["comment"]="not needed",
- ["filename"]="meta-xml",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-log",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="task-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cldf-ver",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cldf-com",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="core-ctx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will always be messy",
- ["filename"]="core-def",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="object related code might move or change",
- ["filename"]="back-pdf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="no code, just an example of usage",
- ["filename"]="back-swf",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="no code, just an example of usage",
- ["filename"]="back-u3d",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mlib-pdf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mlib-pps",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-pdf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="might need more work",
- ["filename"]="grph-epd",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="some parameters might move from export to backend",
- ["filename"]="back-exp",
- ["loading"]="always",
- ["status"]="okay",
- },
- },
- ["extras"]={
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-arrange",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-combine",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-common",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-ideas",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-listing",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-markdown",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-select",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-timing",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- },
- ["implementations"]={
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-fontawesome",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-ebgaramond",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-gentium",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-ipaex",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-lato",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-libertinus",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-mathdigits",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-minion",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-opendyslexic",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-source",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-tex",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-mp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-lua",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-xml",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-parsed-xml",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-grid",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-mat",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-outlines",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-tab",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-apa",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-aps",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-crayola",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-rainbow",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-ral",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-dem",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-ema",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-rgb",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-x11",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-xwi",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="java-imp-exa",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="java-imp-fil",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="java-imp-fld",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="java-imp-rhh",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="java-imp-stp",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-clp",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-dum",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-fen",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-mis",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-nav",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-pre",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-txt",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-cow",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-eur",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-jmn",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-mis",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-mvs",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-nav",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-antykwa",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-antykwapoltawskiego",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-asana",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-averia",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-buy",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-cambria",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-charter",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-cleartype",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-computer-modern-unicode",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-cow",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-dejavu",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-euler",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-ghz",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-hgz",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-husayni",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-hvmath",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-inconsolata",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-informal",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-iwona",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-kurier",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-latinmodern",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-liberation",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-libertine",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-lmnames",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-lucida-opentype",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-lucida-typeone",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-mathdesign",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-mathtimes",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-mscore",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-osx",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-postscript",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-punknova",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-texgyre",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-unfonts",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-xits",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-xitsbidi",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="publ-ini",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="publ-old",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="publ-tra",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="publ-usr",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="publ-xml",
- ["loading"]="always",
- ["status"]="pending",
- },
- },
- ["lua"]={
- {
- ["category"]="lua",
- ["filename"]="anch-pgr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="bibl-tst",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-fio",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="cldf-prs",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="cldf-scn",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="cldf-stp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="cont-run",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-cff",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-cft",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-dsp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-gbn",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-hsh",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-mps",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-nod",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-ocl",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-odk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-odv",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-off",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-one",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-onr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-osd",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otj",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otl",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-oto",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-ots",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-oup",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-sel",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-shp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-ttf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-web",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-xtx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="good-ctx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="good-gen",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="good-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="good-mth",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-con",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-mem",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-pat",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-rul",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="l-gzip",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="l-lua",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="l-package",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="l-sandbox",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-cnt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-frq-de",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-frq-en",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-frq-nl",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-frq-pt",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-rep",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-usr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-dir",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mlib-int",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mlib-lua",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="node-ltp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="node-scn",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="node-met",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="node-nut",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="page-cst",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="page-inj",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-imp-apa",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-imp-aps",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-imp-default",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-imp-replacements",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-jrn",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-reg",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-sor",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-ibm",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-pdfdoc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="scrp-tha",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="spac-prf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="toks-map",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="toks-tra",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-par",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-chr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-duc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-inj",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-lin",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-tal",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-wrp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-fil",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-lib-imp-gm",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-lib-imp-gs",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-ran",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sac",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sbx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sci",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-soc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-imp-client",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-imp-library",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-imp-sqlite",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-imp-swiglib",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-loggers",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-sessions",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-tickets",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-tracers",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-users",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-you",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="m-escrito",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-characters-properties",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-words",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-xml-analyzers",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="x-math-svg",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="anch-pos",
- ["loading"]="anch-pos",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="attr-col",
- ["loading"]="attr-col",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="attr-eff",
- ["loading"]="attr-eff",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="attr-ini",
- ["loading"]="attr-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="attr-lay",
- ["loading"]="attr-lay",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="attr-neg",
- ["loading"]="attr-neg",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="attr-mkr",
- ["loading"]="attr-mkr",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="experimental code, maybe some will move elsewhere",
- ["filename"]="back-exp",
- ["loading"]="back-exp",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="back-ini",
- ["loading"]="back-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="back-pdf",
- ["loading"]="back-pdf",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="bibl-bib",
- ["loading"]="on demand",
- },
- {
- ["category"]="lua",
- ["filename"]="bibl-tra",
- ["loading"]="on demand",
- },
- {
- ["category"]="lua",
- ["filename"]="blob-ini",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="buff-imp-default",
- ["loading"]="buff-imp-default",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="buff-imp-escaped",
- ["loading"]="buff-imp-escaped",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="todo: colorization and nesting as in scite",
- ["filename"]="buff-imp-lua",
- ["loading"]="buff-imp-lua",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="todo: colorization and nesting as in scite",
- ["filename"]="buff-imp-mp",
- ["loading"]="buff-imp-mp",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="buff-imp-nested",
- ["loading"]="buff-imp-nested",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="buff-imp-parsed-xml",
- ["loading"]="buff-imp-parsed-xml",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="todo: colorization and nesting as in scite",
- ["filename"]="buff-imp-tex",
- ["loading"]="buff-imp-tex",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="todo: colorization and nesting as in scite",
- ["filename"]="buff-imp-xml",
- ["loading"]="buff-imp-xml",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="buff-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="buff-par",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe we will provide a few more (nesting) methods",
- ["filename"]="buff-ver",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="catc-ini",
- ["loading"]="catc-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-cjk",
- ["loading"]="char-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-def",
- ["loading"]="char-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe dataonly",
- ["filename"]="char-enc",
- ["loading"]="char-enc",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-ent",
- ["loading"]="char-ent",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe move blocks table to separate (dataonly) file",
- ["filename"]="char-ini",
- ["loading"]="char-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-map",
- ["loading"]="char-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-tex",
- ["loading"]="char-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-utf",
- ["loading"]="char-utf",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="chem-ini",
- ["loading"]="chem-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="chem-str",
- ["loading"]="chem-str",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="will be extended and can be optimized if needed",
- ["filename"]="cldf-bas",
- ["loading"]="cldf-bas",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="might change or even go away",
- ["filename"]="cldf-com",
- ["loading"]="cldf-com",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="cldf-ini",
- ["loading"]="cldf-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="cldf-int",
- ["loading"]="cldf-int",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe this code can be redone more efficiently/robust",
- ["filename"]="cldf-ver",
- ["loading"]="cldf-ver",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="also used in mtx-*",
- ["filename"]="colo-icc",
- ["loading"]="colo-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="colo-ini",
- ["loading"]="colo-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="this code might move to a module",
- ["filename"]="colo-run",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="core-con",
- ["loading"]="core-con",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="core-ctx",
- ["loading"]="core-ctx",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="core-dat",
- ["loading"]="core-dat",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe abusing the tex namespace is wrong",
- ["filename"]="core-env",
- ["loading"]="core-env",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="core-sys",
- ["loading"]="core-sys",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["commands"]="this is in fact replaced by core-dat",
- ["filename"]="core-two",
- ["loading"]="core-two",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="some code will move to better places",
- ["filename"]="core-uti",
- ["loading"]="core-uti",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="data-aux",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-bin",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-con",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-crl",
- ["loading"]="never",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-ctx",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-env",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-exp",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-fil",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-gen",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-ini",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-inp",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-lst",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-lua",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-met",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-out",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-pre",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-res",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-sch",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-tex",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-tmf",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-tmp",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-tre",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-use",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-vir",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-zip",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="file-ini",
- ["loading"]="file-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="file-job",
- ["loading"]="file-job",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="file-lib",
- ["loading"]="file-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="file-mod",
- ["loading"]="file-mod",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="file-res",
- ["loading"]="file-res",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="file-syn",
- ["loading"]="file-syn",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-afm",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-afk",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="only used in luatex-fonts",
- ["filename"]="font-age",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-agl",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="needs some documentation in usage",
- ["filename"]="font-aux",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="move more to the commands namespace",
- ["filename"]="font-chk",
- ["loading"]="font-chk",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-cid",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-col",
- ["loading"]="font-col",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-con",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="will be improved over time",
- ["filename"]="font-ctx",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-def",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="part of this code is obsolete",
- ["filename"]="font-enc",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="needs documentation at the tex end",
- ["filename"]="font-enh",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe some data tables can be be external",
- ["filename"]="font-ext",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="okay but can be improved",
- ["filename"]="font-fbk",
- ["loading"]="font-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="font-ini",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-inj",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-ldr",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-log",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="currently rather minimalistic",
- ["filename"]="font-lua",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="the lum file support will be dropped / no map files anyway",
- ["filename"]="font-map",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-mis",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="when more scripts are supported we might end up with imp files",
- ["filename"]="font-ota",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otb",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otc",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otd",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otf",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-oth",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-oti",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otn",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otp",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-ott",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="is mostly replaced by lfg files",
- ["filename"]="font-pat",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-sol",
- ["loading"]="font-sol",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="also loaded on demand",
- ["filename"]="font-syn",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-tfm",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-trt",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-vf",
- ["loading"]="font-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-epd",
- ["loading"]="grph-epd",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-fil",
- ["loading"]="grph-inc",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-inc",
- ["loading"]="grph-inc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-raw",
- ["loading"]="grph-raw",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-swf",
- ["loading"]="grph-swf",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-u3d",
- ["loading"]="grph-u3d",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="experiment with graphic magick library",
- ["filename"]="grph-wnd",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="java-ini",
- ["loading"]="java-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="l-boolean",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-dir",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-file",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-function",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-io",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-lpeg",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-math",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-md5",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-number",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-os",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-pdfview",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-set",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-string",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-table",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-unicode",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-url",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-xml",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-def",
- ["loading"]="lang-def",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-dis",
- ["loading"]="lang-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-hyp",
- ["loading"]="lang-hyp",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-ini",
- ["loading"]="lang-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-lab",
- ["loading"]="lang-lab",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-hyp",
- ["loading"]="lang-hyp",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-txt",
- ["loading"]="lang-lab",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe another approach is nicer",
- ["filename"]="lang-url",
- ["loading"]="lang-url",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-wrd",
- ["loading"]="lang-wrd",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="more will end up here",
- ["filename"]="layo-ini",
- ["loading"]="layo-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-ano",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-res",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-col",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-enc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-epa",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-epd",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-fld",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-fmt",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-grp",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-ini",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-mis",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-mov",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-nod",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-ren",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-swf",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-tag",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-u3d",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-wid",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-xmp",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["comment"]="replacement code for wd/ht/dp",
- ["filename"]="luat-bwc",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-cbk",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-cnf",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe some code should move",
- ["filename"]="luat-cod",
- ["loading"]="luat-cod",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-env",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-exe",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-fio",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-fmt",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="will be upgraded when we have Lua 5.2",
- ["filename"]="luat-ini",
- ["loading"]="luat-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="will be upgraded when we have Lua 5.2",
- ["filename"]="util-env",
- ["loading"]="luat-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-iop",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="this is likely to change some day",
- ["filename"]="luat-lua",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-mac",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-run",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="related to the socket code",
- ["filename"]="luat-soc",
- ["loading"]="on demand",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-sta",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-sto",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-aux",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-css",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-ctx",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-dir",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-ent",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-inf",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-lpt",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-mis",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-sor",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-tab",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-tex",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-xml",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="m-chart",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="m-database",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="m-nodechart",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="m-markdown",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="m-pstricks",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="m-spreadsheet",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="m-steps",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="math-act",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-frc",
- ["loading"]="math-frc",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="could be made look nicer, but who cares",
- ["filename"]="math-dim",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="the code is related to math-vfu",
- ["filename"]="math-ext",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-fbk",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-frc",
- ["loading"]="math-frc",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="okay, but we might have a few more low level definers some day",
- ["filename"]="math-ini",
- ["loading"]="math-ini",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="math-map",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-noa",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-ren",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-tag",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-ttv",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-vfu",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="this is just a first version",
- ["filename"]="meta-fun",
- ["loading"]="meta-fun",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="meta-ini",
- ["loading"]="meta-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="meta-lua",
- ["loading"]="meta-lua",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="meta-fnt",
- ["loading"]="meta-fnt",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="could be done nicer nowadays but who needs it",
- ["filename"]="meta-pdf",
- ["loading"]="meta-pdf",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="this is historic code that we keep around",
- ["filename"]="meta-pdh",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="meta-tex",
- ["loading"]="meta-tex",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mlib-ctx",
- ["loading"]="mlib-ctx",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mlib-pdf",
- ["loading"]="mlib-pdf",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mlib-pps",
- ["loading"]="mlib-pdf",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mlib-run",
- ["loading"]="mlib-ctx",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="this is an experiment, namespaces need to be dealt with properly",
- ["filename"]="mult-aux",
- ["loading"]="mult-aux",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="this is an experiment",
- ["filename"]="mult-chk",
- ["loading"]="mult-chk",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="mult-def",
- ["loading"]="mult-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="used for generating editor lexing files",
- ["filename"]="mult-fun",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mult-ini",
- ["loading"]="mult-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="used for generating editor lexing files",
- ["filename"]="mult-low",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="all messages need to be checked",
- ["filename"]="mult-mes",
- ["loading"]="mult-ini",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="used for generating editor lexing files",
- ["filename"]="mult-mps",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="used for generating editor lexing files",
- ["filename"]="mult-prm",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="node-acc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-aux",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-bck",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-dir",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-ext",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-fin",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-fnt",
- ["loading"]="font-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-ini",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-mig",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-pag",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-ppt",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-pro",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-ref",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-res",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-rul",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-ser",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-shp",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-tex",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-tra",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-snp",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-tsk",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-tst",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-typ",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["comment"]="will be extended when we have opened up pdf objects",
- ["filename"]="pack-obj",
- ["loading"]="pack-obj",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="pack-rul",
- ["loading"]="pack-rul",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="page-otr",
- ["loading"]="page-otr",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="page-flt",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="page-ins",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="page-lin",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="page-mix",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="page-pst",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="page-str",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="phys-dim",
- ["loading"]="phys-dim",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-1",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-10",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-11",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-13",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-14",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-15",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-16",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-2",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-3",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-4",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-5",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-6",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-7",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-8",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-9",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1250",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1251",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1252",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1253",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1254",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1255",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1256",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1257",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1258",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="just a demo file",
- ["filename"]="regi-demo",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-ini",
- ["loading"]="regi-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-coverage",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-features",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-missing",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-shapes",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-system",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-tables",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-vectors",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-counters",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-frequencies",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-hyphenation",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-sorting",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-system",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-math-characters",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-math-coverage",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-goodies",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-math-parameters",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-pre-71",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="s-sql-tables",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-but",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-fld",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-hlp",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-ini",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-pag",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-ref",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-wid",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["comment"]="we can speed this up",
- ["filename"]="scrp-cjk",
- ["loading"]="scrp-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="we can speed this up",
- ["filename"]="scrp-eth",
- ["loading"]="scrp-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="scrp-ini",
- ["loading"]="scrp-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="sort-ini",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="sort-lan",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="spac-adj",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="spac-ali",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="spac-chr",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="spac-hor",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="spac-ver",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="status-mkiv",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-bkm",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-blk",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-con",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-doc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-flt",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-ini",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-itm",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-lev",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-lst",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-mar",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-mat",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-not",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-num",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-pag",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-ref",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-reg",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-rsc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-syn",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-tag",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="supp-box",
- ["loading"]="supp-box",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="supp-ran",
- ["loading"]="supp-ran",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="symb-ini",
- ["loading"]="symb-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="there will be more in here",
- ["filename"]="syst-aux",
- ["loading"]="syst-aux",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="do some tests with speedups (sprint)",
- ["filename"]="syst-con",
- ["loading"]="syst-con",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="do some tests with speedups (less tokens)",
- ["filename"]="syst-lua",
- ["loading"]="syst-lua",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="tabl-tbl",
- ["loading"]="tabl-tbl",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="work in progress",
- ["filename"]="tabl-xtb",
- ["loading"]="tabl-xtb",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
-      ["comment"]="we need a well defined definition moment",
- ["filename"]="task-ini",
- ["loading"]="task-ini",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="toks-ini",
- ["loading"]="toks-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="toks-scn",
- ["loading"]="toks-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="must be applied in more places",
- ["filename"]="trac-ctx",
- ["loading"]="trac-ctx",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-deb",
- ["loading"]="trac-deb",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="for the moment somewhat private",
- ["filename"]="trac-fil",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-inf",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="will be redone and extended",
- ["filename"]="trac-lmx",
- ["loading"]="luat-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-log",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-xml",
- ["loading"]="mtxrun",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-exp",
- ["loading"]="mtxrun",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="experimental code, will be redone when lua 5.2",
- ["filename"]="trac-pro",
- ["loading"]="luat-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="some code can better be in util-set",
- ["filename"]="trac-set",
- ["loading"]="luat-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-tex",
- ["loading"]="trac-tex",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-tim",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-vis",
- ["loading"]="trac-vis",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-jus",
- ["loading"]="trac-jus",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="type-ini",
- ["loading"]="type-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-bld",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-sus",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-brk",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-cap",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-cln",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-dig",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-dir",
- ["loading"]="typo-dir",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="work in progress",
- ["filename"]="typo-dha",
- ["loading"]="typo-dir",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-dua",
- ["loading"]="typo-dir",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="work in progress",
- ["filename"]="typo-dub",
- ["loading"]="typo-dir",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-ini",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-tal",
- ["loading"]="typo-tal",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-itc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-krn",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-mar",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-pag",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-drp",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-fln",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-man",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-prc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-lan",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-rep",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-spa",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="unic-ini",
- ["loading"]="unic-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-deb",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-dim",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-fmt",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-jsn",
- ["loading"]="m-json",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-lua",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-lib",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-mrg",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-pck",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-prs",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-seq",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql",
- ["loading"]="m-sql",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sta",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sto",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-str",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-tab",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-tpl",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-asciimath",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-calcmath",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-cals",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-chemml",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-ct",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-ldx",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-mathml",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-ini",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-aut",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-dat",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-oth",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-fnd",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-tra",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-usr",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- },
- ["main"]={
- {
- ["category"]="mkiv",
- ["filename"]="context",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="lus",
- ["comment"]="stub file for context",
- ["filename"]="context",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["filename"]="metatex",
- ["loading"]="parent",
- ["status"]="pending",
- },
- {
- ["category"]="lus",
- ["comment"]="stub file for metatex",
- ["filename"]="metatex",
- ["loading"]="parent",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-cs",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-de",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-en",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-fr",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-gb",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-it",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-nl",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-pe",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-ro",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="we keep this around for historic reasons",
- ["filename"]="ppchtex",
- ["loading"]="never",
- ["status"]="okay",
- },
- },
- ["metafun"]={
- {
- ["category"]="mpiv",
- ["comment"]="maybe more delayed loading",
- ["filename"]="metafun",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-base",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-tool",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-mlib",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="sort of obsolete",
- ["filename"]="mp-core",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="maybe some nicer synonyms",
- ["filename"]="mp-page",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-butt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-shap",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-grph",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-grid",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="a hack anyway",
- ["filename"]="mp-form",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-figs",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-func",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-text",
- ["loading"]="on demand",
- ["status"]="todo",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-crop",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="follows m-chart",
- ["filename"]="mp-char",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="follows m-steps",
- ["filename"]="mp-step",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-chem",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="maybe some namespace changes",
- ["filename"]="mp-abck",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="maybe some namespace changes",
- ["filename"]="mp-apos",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="will be done when needed",
- ["filename"]="mp-asnc",
- ["loading"]="on demand",
- ["status"]="todo",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-back",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-bare",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-cows",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-fobg",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-grap",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-idea",
- ["loading"]="on demand",
- ["status"]="unknown",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-luas",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-symb",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- },
- ["modules"]={
- {
- ["category"]="mkiv",
- ["comment"]="best use m-zint instead",
- ["filename"]="m-barcodes",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="m-chart",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this is a placeholder (chemistry is built-in)",
- ["filename"]="m-chemic",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-cweb",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-database",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="m-nodechart",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="add-on for mtx-context",
- ["filename"]="m-directives",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-educat",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-fields",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-format",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-graph",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-ipsum",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-json",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-layout",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="add-on for mtx-context",
- ["filename"]="m-logcategories",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-markdown",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-mathcrap",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-mkii",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-mkivhacks",
- ["status"]="todo",
- },
- {
- ["category"]="mkvi",
- ["filename"]="m-morse",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-narrowtt",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-ntb-to-xtb",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-obsolete",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-oldfun",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-oldnum",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-pictex",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-pstricks",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="keep an eye on changes in lua code",
- ["filename"]="m-punk",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-spreadsheet",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="m-steps",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-subsub",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-sql",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-timing",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="add-on for mtx-context",
- ["filename"]="m-trackers",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-translate",
- ["status"]="okay",
- },
- {
- ["category"]="xsd",
- ["filename"]="x-chemml",
- ["status"]="todo",
- },
- {
- ["category"]="xsd",
- ["filename"]="x-contml",
- ["status"]="todo",
- },
- {
- ["category"]="rng",
- ["filename"]="x-corres",
- ["status"]="todo",
- },
- {
- ["category"]="dtd",
- ["filename"]="x-fig-00",
- ["status"]="todo",
- },
- {
- ["category"]="xsd",
- ["filename"]="x-fig-00",
- ["status"]="todo",
- },
- {
- ["category"]="ctx",
- ["filename"]="x-ldx",
- ["status"]="todo",
- },
- {
- ["category"]="xsd",
- ["filename"]="x-mathml",
- ["status"]="todo",
- },
- {
- ["category"]="xsl",
- ["filename"]="x-om2cml",
- ["status"]="todo",
- },
- {
- ["category"]="xsl",
- ["filename"]="x-openmath",
- ["status"]="todo",
- },
- {
- ["category"]="ctx",
- ["comment"]="runner for x-pfs-01",
- ["filename"]="x-pfsense",
- ["status"]="okay",
- },
- {
- ["category"]="xsd",
- ["filename"]="x-physml",
- ["status"]="todo",
- },
- {
- ["category"]="xsl",
- ["filename"]="x-sm2om",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-units",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-visual",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-zint",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["filename"]="s-abr-01",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-abr-02",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-abr-03",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-abr-04",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-art-01",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-cdr-01",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-def-01",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-faq-00",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-faq-01",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-faq-02",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-faq-03",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fnt-10",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fnt-20",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fnt-21",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fnt-24",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-coverage",
- ["loading"]="s-fonts-coverage",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-features",
- ["loading"]="s-fonts-features",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-goodies",
- ["loading"]="s-fonts-goodies",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-missing",
- ["loading"]="s-fonts-missing",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-shapes",
- ["loading"]="s-fonts-shapes",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-system",
- ["loading"]="s-fonts-system",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-tables",
- ["loading"]="s-fonts-tables",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-vectors",
- ["loading"]="s-fonts-vectors",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="s-inf-01",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-inf-02",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-inf-03",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-inf-04",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-counters",
- ["loading"]="s-languages-counters",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-frequencies",
- ["loading"]="s-languages-frequencies",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-hyphenation",
- ["loading"]="s-languages-hyphenation",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-languages-sorting",
- ["loading"]="s-languages-sorting",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-languages-system",
- ["loading"]="s-languages-system",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-mag-01",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-map-10",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-math-characters",
- ["loading"]="s-math-characters",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-math-coverage",
- ["loading"]="s-math-coverage",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-math-extensibles",
- ["loading"]="s-math-extensibles",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-math-parameters",
- ["loading"]="s-math-parameters",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-math-repertoire",
- ["loading"]="s-math-repertoire",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-mod-00",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-mod-01",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-mod-02",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pages-statistics",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-physics-units",
- ["loading"]="s-physics-units",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-00",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-01",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-02",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-03",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-04",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-05",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-06",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-07",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-08",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-09",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-10",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-11",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-12",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-13",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-14",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-15",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-16",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pre-17",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-18",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-19",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-22",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-23",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-26",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-27",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pre-30",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-present-tiles",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-50",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pre-60",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-61",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-62",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-63",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-64",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-66",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-67",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-68",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pre-69",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pre-70",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pre-71",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-93",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-96",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-reg-01",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-sql-tables",
- ["loading"]="s-sql-tables",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-asciimath",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-calcmath",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-cals",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-chemml",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-ct",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-entities",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-foxet",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-ldx",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-mathml",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-newmml",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="pfsense xml configuration rendering",
- ["filename"]="x-pfs-01",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-physml",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-res-01",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-res-50",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-udhr",
- ["status"]="okay",
- },
- },
- ["optional"]={
- {
- ["category"]="mkiv",
- ["filename"]="bxml-apa",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-run",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="always needs some work",
- ["filename"]="cont-new",
- ["loading"]="runtime",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="font-run",
- ["loading"]="on demand",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this is an experimental module",
- ["filename"]="lxml-ctx",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this is historic code that we keep around",
- ["filename"]="meta-pdh",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this is just a helper for generating files",
- ["filename"]="mult-prm",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-run",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="spac-adj",
- ["loading"]="never",
- ["status"]="obsolete",
- },
- {
- ["category"]="mkiv",
- ["comment"]="replaced by a more modern variant",
- ["filename"]="supp-vis",
- ["loading"]="never",
- ["status"]="obsolete",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-run",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- },
- ["patterns"]={
- {
- ["category"]="lua",
- ["filename"]="lang-af",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-agr",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-ala",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-bg",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-ca",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-cs",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-cy",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-da",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-de",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-deo",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-es",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-et",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-eu",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-fi",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-fr",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-gb",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-hr",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-hu",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-is",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-it",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-la",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-lt",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-lv",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-ml",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-mn",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-nb",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-nl",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-nn",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-pl",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-pt",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-ro",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-ru",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-sk",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-sl",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-sr",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-sv",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-th",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-tk",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-tr",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-uk",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-us",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-zh",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="word-xx",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- },
- ["resources"]={
- {
- ["category"]="ori",
- ["comment"]="template for a user configuration file (with suffix mkiv)",
- ["filename"]="cont-sys",
- ["loading"]="runtime",
- ["status"]="okay",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-base",
- ["status"]="todo",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-characters",
- ["status"]="todo",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-debug",
- ["status"]="todo",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-error",
- ["status"]="todo",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-fonttest",
- ["status"]="todo",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-help",
- ["status"]="todo",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-timing",
- ["status"]="todo",
- },
- {
- ["category"]="pdf",
- ["filename"]="context-version",
- ["status"]="todo",
- },
- {
- ["category"]="png",
- ["filename"]="context-version",
- ["status"]="todo",
- },
- {
- ["category"]="css",
- ["comment"]="layout specification for debug and error pages and web services",
- ["filename"]="context",
- ["status"]="okay",
- },
- {
- ["category"]="rme",
- ["comment"]="readme file",
- ["filename"]="context",
- ["status"]="okay",
- },
- {
- ["category"]="ctx",
- ["comment"]="example of a ctx file (for mtx-context)",
- ["filename"]="core-ctx",
- ["status"]="okay",
- },
- {
- ["category"]="css",
- ["filename"]="export-example",
- ["status"]="todo",
- },
- {
- ["category"]="rng",
- ["filename"]="export-example",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="export-example",
- ["status"]="todo",
- },
- {
- ["category"]="xml",
- ["comment"]="this file is auto-generated by mtx-language",
- ["filename"]="lang-all",
- ["status"]="okay",
- },
- {
- ["category"]="xml",
- ["filename"]="lpdf-pda",
- ["status"]="todo",
- },
- {
- ["category"]="xml",
- ["filename"]="lpdf-pdx",
- ["status"]="todo",
- },
- {
- ["category"]="rlx",
- ["filename"]="rlxcache",
- ["status"]="todo",
- },
- {
- ["category"]="rlx",
- ["filename"]="rlxtools",
- ["status"]="todo",
- },
- {
- ["category"]="ctx",
- ["filename"]="s-mod",
- ["status"]="todo",
- },
- {
- ["category"]="pdf",
- ["filename"]="status-files",
- ["status"]="todo",
- },
- {
- ["category"]="pdf",
- ["filename"]="status-lua",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="status-mkiv",
- ["status"]="todo",
- },
- },
- ["todo"]={
- {
- ["category"]="lua",
- ["filename"]="core-run",
- ["status"]="idea",
- },
- },
-}
diff --git a/tex/context/base/mkiv/status-mkiv.tex b/tex/context/base/mkiv/status-mkiv.tex
deleted file mode 100644
index 8685c97ad..000000000
--- a/tex/context/base/mkiv/status-mkiv.tex
+++ /dev/null
@@ -1,328 +0,0 @@
-\usemodule[abr-02]
-
-\setupbodyfont
- [dejavu,9pt]
-
-\setuppapersize
- [A4,landscape]
-
-\setuplayout
- [width=middle,
- height=middle,
- backspace=.5cm,
- topspace=.5cm,
- footer=0pt,
- header=1.25cm]
-
-\setuphead
- [title]
- [style=\bfa,
- page=yes,
- after={\blank[line]}]
-
-\setuppagenumbering
- [location=]
-
-\setupheadertexts
- [\currentdate]
- [MkIV Status / Page \pagenumber]
-
-% \showmakeup
-% \showallmakeup
-
-\starttext
-
-% logs.report (immediate) versus logs.messenger (in flow)
-
-\starttitle[title=Todo]
-
-\startitemize[packed]
- \startitem currently the new namespace prefixes are not consistent but this
- will be done when we're satisfied with one scheme \stopitem
- \startitem there will be additional columns in the table, like for namespace
- so we need another round of checking then \stopitem
-    \startitem the lua code will be cleaned up and upgraded as some is quite old
- and experimental \stopitem
- \startitem we need a proper dependency tree and better defined loading order \stopitem
- \startitem all dotag.. will be moved to the tags_.. namespace \stopitem
- \startitem we need to check what messages are gone (i.e.\ clean up mult-mes) \stopitem
- \startitem some commands can go from mult-def (and the xml file) \stopitem
- \startitem check for setuphandler vs simplesetuphandler \stopitem
- \startitem for the moment we will go for \type {xxxx_} namespaces that (mostly) match
- the filename but later we can replace these by longer names (via a script) so
- module writers should {\bf not} use the core commands with \type{_} in the
- name \stopitem
- \startitem the message system will be unified \stopitem
- \startitem maybe rename dowhatevertexcommand to fromluawhatevertexcommand \stopitem
- \startitem consider moving setups directly to lua end (e.g. in characterspacing, breakpoint, bitmaps etc.) \stopitem
- \startitem more local temporary \type {\temp...} will become \type {\p_...} \stopitem
- \startitem check all ctxlua calls for ctxcommand \stopitem
- \startitem rename all those \type {\current<whatever>}s in strc \stopitem
- \startitem check \type {option} vs \type {options} \stopitem
- \startitem check \type {type} vs \type {kind} \stopitem
- \startitem check \type {label} vs \type {name} vs \type {tag} \stopitem
-    \startitem check \type {limop}, different limops should be classes \stopitem
- \startitem too many positions in simple files (itemize etc) \stopitem
- \startitem math domains/dictionaries \stopitem
- \startitem xtables don't span vertically with multilines (yet) \stopitem
- \startitem notes in mixed columns \stopitem
- \startitem floats in mixed columns \stopitem
- \startitem check return values \type {os.execute} \stopitem
- \startitem more r, d, k in xml code \stopitem
- \startitem mathml, more in \LUA \stopitem
- \startitem style: font-size, font, color handling in \HTML\ (lxml-css) \stopitem
- \startitem a \type {\name {A.B.C DEF}} auto-nobreakspace \stopitem
- \startitem redo \CWEB\ module with \LUA \stopitem
- \startitem maybe move characters.blocks to its own file \stopitem
- \startitem more local context = context in \LUA\ files \stopitem
- \startitem check and optimize all storage.register and locals (cosmetics) \stopitem
- \startitem check all used modules in \LUA\ (and local them) \stopitem
- \startitem environment and basic lua helpers are now spread over too many files \stopitem
- \startitem isolate tracers and showers \stopitem
- \startitem check all possible usage of ctxcommand \stopitem
- \startitem there are more s-* modules, like s-fnt-41 \stopitem
- \startitem check (un)marked tables \stopitem
-\stopitemize
-
-\stoptitle
-
-\starttitle[title=To keep an eye on]
-
-\startitemize[packed]
- \startitem Currently lpeg replacements interpret the percent sign so we need to escape it. \stopitem
-    \startitem Currently numbers and strings are cast in comparisons but that might change in the future. \stopitem
-\stopitemize
-
-\stoptitle
-
-\definehighlight[notabenered] [color=darkred, style=bold]
-\definehighlight[notabeneblue] [color=darkblue, style=bold]
-\definehighlight[notabeneyellow] [color=darkyellow, style=bold]
-\definehighlight[notabenemagenta][color=darkmagenta,style=bold]
-
-\startluacode
-
- local coremodules = dofile("status-mkiv.lua")
-
- local valid = table.tohash {
- "toks", "attr", "page", "buff", "font", "colo", "phys", "supp", "typo", "strc",
- "syst", "tabl", "spac", "scrn", "lang", "lxml", "mlib", "java", "pack", "math",
- "symb", "grph", "anch", "luat", "mult", "back", "node", "meta", "norm", "catc",
- "cldf", "file", "char", "core", "layo", "trac", "cont", "regi", "enco", "hand",
- "unic", "sort", "blob", "type", "scrp", "prop", "chem", "bibl", "task",
- "whatever", "mp", "s", "x", "m", "mtx",
- }
-
- local specialcategories = {
- mkvi = true,
- }
-
- local what = {
- "main", "core", "lua", "optional", "implementations", "extra", "extras", "metafun", "modules", "resources"
- }
-
- local totaltodo = 0
- local totalpending = 0
- local totalobsolete = 0
- local totalloaded = 0
-
- local function summary(nofloaded,noftodo,nofpending,nofobsolete)
-
- local nofdone = nofloaded - noftodo - nofpending - nofobsolete
-
- context.starttabulate { "|B|r|" }
- context.HL()
- context.NC() context("done") context.NC() context(nofdone) context.NC() context.NR()
- context.NC() context("todo") context.NC() context(noftodo) context.NC() context.NR()
- context.NC() context("pending") context.NC() context(nofpending) context.NC() context.NR()
- context.NC() context("obsolete") context.NC() context(nofobsolete) context.NC() context.NR()
- context.HL()
- context.NC() context("loaded") context.NC() context(nofloaded) context.NC() context.NR()
- context.HL()
- context.stoptabulate()
-
- end
-
- if coremodules then
-
- local function tabelize(loaded,what)
-
- if loaded then
-
- local noftodo = 0
- local nofpending = 0
- local nofobsolete = 0
- local nofloaded = #loaded
- local categories = { }
-
- for k, v in next, valid do
- categories[k] = { }
- end
-
- for i=1,nofloaded do
- local l = loaded[i]
- l.order = i
- local category = string.match(l.filename,"([^%-]+)%-") or "whatever"
- local c = categories[category]
- if c then
- c[#c+1] = l
- end
- end
-
- for k, loaded in table.sortedhash(categories) do
-
- local nofloaded = #loaded
-
- if nofloaded > 0 then
-
- table.sort(loaded,function(a,b) return a.filename < b.filename end) -- in place
-
- context.starttitle { title = string.format("%s: %s",what,k) }
-
- context.starttabulate { "|Tr|Tlw(3em)|Tlw(12em)|Tlw(12em)|Tlw(4em)|Tl|Tl|Tl|Tp|" }
- context.NC() context.bold("order")
- context.NC() context.bold("kind")
- context.NC() context.bold("file")
- context.NC() context.bold("loading")
- context.NC() context.bold("status")
- context.NC() context.bold("reference")
- context.NC() context.bold("manual")
- context.NC() context.bold("wiki")
- context.NC() context.bold("comment")
- context.NC() context.NR()
- context.HL()
- for i=1,nofloaded do
- local module = loaded[i]
- local status = module.status
- local category = module.category
- local filename = module.filename
- context.NC()
- context(module.order)
- context.NC()
- if specialcategories[category] then
- context.notabeneblue(category)
- else
- context(category)
- end
- context.NC()
- if #filename>20 then
- context(string.sub(filename,1,18) .. "..")
- else
- context(filename)
- end
- context.NC()
- context(module.loading)
- context.NC()
- if status == "todo" then
- context.notabenered(status)
- noftodo = noftodo + 1
- elseif status == "pending" then
- context.notabeneyellow(status)
- nofpending = nofpending + 1
- elseif status == "obsolete" then
- context.notabenemagenta(status)
- nofobsolete = nofobsolete + 1
- else
- context(status)
- end
- context.NC() context(module.reference)
- context.NC() context(module.manual)
- context.NC() context(module.wiki)
- context.NC() context(module.comment)
- context.NC() context.NR()
- end
- context.stoptabulate()
-
- context.stoptitle()
-
- end
-
- end
-
- context.starttitle { title = string.format("summary of %s modules",what) }
-
- summary(nofloaded,noftodo,nofpending,nofobsolete)
-
- context.stoptitle()
-
- totaltodo = totaltodo + noftodo
- totalpending = totalpending + nofpending
- totalobsolete = totalobsolete + nofobsolete
- totalloaded = totalloaded + nofloaded
-
- end
-
- end
-
- for i=1,#what do
- tabelize(coremodules[what[i]],what[i])
- end
-
- end
-
- -- context.starttitle { title = "Valid prefixes" }
- --
- -- for namespace, data in table.sortedhash(namespaces) do
- -- if valid[namespace] then
- -- context.type(namespace)
- -- end
- -- context.par()
- -- end
- --
- -- context.stoptitle()
-
- context.starttitle { title = string.format("summary of all",what) }
-
- summary(totalloaded,totaltodo,totalpending,totalobsolete)
-
- context.stoptitle()
-
- if io.exists("status-namespaces.lua") then
-
- context.starttitle { title = "messy namespaces" }
-
- local namespaces = dofile("status-namespaces.lua")
-
- for namespace, data in table.sortedhash(namespaces) do
- if valid[namespace] then
- else
- context(namespace)
- end
- context.par()
- end
-
- context.stoptitle()
-
- end
-
- if io.exists("status-registers.lua") then
-
- context.starttitle { title = "messy registers" }
-
- local registers = dofile("status-registers.lua")
-
- for register, data in table.sortedhash(registers) do
- context(register)
- context.par()
- for name in table.sortedhash(data) do
- context.quad()
- context.type(name)
- context.par()
- end
- context.par()
- end
-
- context.stoptitle()
-
- end
-
- context.starttitle { title = "callbacks" }
-
- commands.showcallbacks()
-
- context.stoptitle()
-
-\stopluacode
-
-
-\stoptext
diff --git a/tex/context/interface/mkiv/i-context.pdf b/tex/context/interface/mkiv/i-context.pdf
index 23c4a1f20..d8fbecc32 100644
--- a/tex/context/interface/mkiv/i-context.pdf
+++ b/tex/context/interface/mkiv/i-context.pdf
Binary files differ
diff --git a/tex/context/interface/mkiv/i-readme.pdf b/tex/context/interface/mkiv/i-readme.pdf
index 95d456ece..1bcd2b2f9 100644
--- a/tex/context/interface/mkiv/i-readme.pdf
+++ b/tex/context/interface/mkiv/i-readme.pdf
Binary files differ
diff --git a/tex/context/sample/common/jojomayer.tex b/tex/context/sample/common/jojomayer.tex
new file mode 100644
index 000000000..966b356a6
--- /dev/null
+++ b/tex/context/sample/common/jojomayer.tex
@@ -0,0 +1,3 @@
+If we surrender the thing that separates us from machines, we will be replaced by
+machines. The more advanced machines will be, the more human we will have to
+become.
diff --git a/tex/context/sample/common/mcnish.tex b/tex/context/sample/common/mcnish.tex
new file mode 100644
index 000000000..ab09ca0bc
--- /dev/null
+++ b/tex/context/sample/common/mcnish.tex
@@ -0,0 +1,73 @@
+% poetry, music and typesetting
+%
+% https://www.youtube.com/watch?v=SB8_2Yuj8Og
+%
+% Published on Nov 1, 2016
+% Taken from 'Poetry versus Orchestra'. Album by Hollie McNish and Metropole Orkest, conducted by Jules Buckley.
+% Released on MO Black, fall 2016.
+% www.mo.nl/poetry • www.holliepoetry.com
+%
+% Poem written by Hollie McNish, music by Jules Buckley
+% Artwork by me studio, Martin Pyper
+%
+MATHEMATICS
+
+he said
+“those goddamn pakistanis and their goddamn corner shops
+built a shop on every corner, took our british workers jobs”
+he said
+“those goddamn chinese and their goddamn china shops”
+i told him they’re from vietnam, but he didn’t give a toss
+i ask him what was there before that “damn japan man’s shop?”
+he looks at me and dreams a scene of british workers’ jobs
+of full-time, full-employment before the “goddamn boats all came”
+where everybody went to work for full-time full-hours every day
+“a british business stood their first”
+he claims
+“before the irish came
+now british people lost their jobs and bloody turkish they're to blame”
+i ask him how he knows that fact, he says, “because it’s true”
+i ask him how he knows it’s fact, he says, he read it in the news,
+“every time a somali comes here they take a job from us
+the mathematics one for one, from us to them, it just adds up.”
+he bites his cake, he sips his brew, he says again he knows the plot
+“the goddamn caribbean’s came and now good folk here don’t have jobs”
+i ask him what was there before the “goddamn persian curtain shop”
+i show him architectures’ plans
+of empty goddamn plots of land
+I show him the historic maps
+a bit of sand
+a barren land
+there was no goddamn shop before that pakistani came and planned
+man - i am sick of crappy mathematics
+cos i love a bit of sums
+i spent years into economics
+and i geek out over calculus
+and when i meet these paper claims
+that one of every new that came
+takes away ‘our’ daily wage
+i desperately want to scream
+“your math is stuck-in primary”
+cos some who come here also spend
+and some who come here also lend
+and some who come here also tend
+to set up work which employs them
+and all those balance sheets and trends
+they work with numbers, not with men
+and all this goddamn heated talk
+ignores the trade the polish brought
+ignores the men they give work to
+not plumbing jobs but further too
+ignores the guys they buy stock from
+accountants, builders, on and on
+and i know it’s nice to have someone
+to blame our lack of jobs upon
+but immigration’s not that plain
+despite the sums inside our brains
+as one for one
+as him for you
+as if he goes ‘home’, they’d employ you
+cos sometimes one that comes makes two
+and sometimes one can add three more
+and sometimes two times two is much, much more than four
+and most times immigrants bring more than minuses.
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index a94ba85af..0c8e40ce4 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : c:/data/develop/context/sources/luatex-fonts-merged.lua
-- parent file : c:/data/develop/context/sources/luatex-fonts.lua
--- merge date : 05/25/17 12:50:49
+-- merge date : 05/27/17 18:52:27
do -- begin closure to overcome local limits and interference
@@ -7758,7 +7758,7 @@ function constructors.scale(tfmdata,specification)
target.unscaled=tfmdata
local mathsize=tonumber(specification.mathsize) or 0
local textsize=tonumber(specification.textsize) or scaledpoints
- local forcedsize=tonumber(parameters.mathsize ) or 0
+ local forcedsize=tonumber(parameters.mathsize ) or 0
local extrafactor=tonumber(specification.factor ) or 1
if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then
scaledpoints=parameters.scriptpercentage*textsize/100
@@ -7766,6 +7766,7 @@ function constructors.scale(tfmdata,specification)
scaledpoints=parameters.scriptscriptpercentage*textsize/100
elseif forcedsize>1000 then
scaledpoints=forcedsize
+ else
end
targetparameters.mathsize=mathsize
targetparameters.textsize=textsize
@@ -8827,7 +8828,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules={} end modules ['font-cid']={
version=1.001,
- comment="companion to font-otf.lua (cidmaps)",
+ comment="companion to font-ini.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
@@ -19850,7 +19851,7 @@ local function checkmathsize(tfmdata,mathsize)
local parameters=tfmdata.parameters
parameters.scriptpercentage=mathdata.ScriptPercentScaleDown
parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown
- parameters.mathsize=mathsize
+ parameters.mathsize=mathsize
end
end
registerotffeature {
@@ -21771,7 +21772,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules={} end modules ['font-ota']={
version=1.001,
- comment="companion to font-otf.lua (analysing)",
+ comment="companion to font-ini.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
@@ -27534,7 +27535,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules={} end modules ['font-ocl']={
version=1.001,
- comment="companion to font-otf.lua (context)",
+ comment="companion to font-ini.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
@@ -27982,7 +27983,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules={} end modules ['font-otc']={
version=1.001,
- comment="companion to font-otf.lua (context)",
+ comment="companion to font-ini.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
@@ -29922,7 +29923,7 @@ do -- begin closure to overcome local limits and interference
if not modules then modules={} end modules ['font-afk']={
version=1.001,
- comment="companion to font-afm.lua",
+ comment="companion to font-lib.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files",
diff --git a/tex/generic/context/luatex/luatex-fonts.lua b/tex/generic/context/luatex/luatex-fonts.lua
index 20690992c..93ead749e 100644
--- a/tex/generic/context/luatex/luatex-fonts.lua
+++ b/tex/generic/context/luatex/luatex-fonts.lua
@@ -231,21 +231,6 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
loadmodule('luatex-fonts-syn.lua')
loadmodule('font-oti.lua')
-
- -- These are the old loader and processing modules. These use the built-in font loader and
- -- will stay around (but not be extended), only fixed.
-
- -- font-otf.lua
- -- font-otb.lua
- -- font-inj.lua
- -- font-ota.lua
- -- font-otn.lua
- -- font-otp.lua
-
- -- Here come the new loader and processing modules. The loader is written in Lua and although
- -- initial loading is somewhat slower, identifying is faster, cached files can be slightly
- -- more efficient, and processing is somewhat faster (only measureable on complex fonts).
-
loadmodule('font-otr.lua')
loadmodule('font-cff.lua')
loadmodule('font-ttf.lua')
@@ -258,7 +243,6 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
loadmodule('font-ots.lua')
loadmodule('font-osd.lua')
loadmodule('font-ocl.lua') -- svg needs 0.97 (for fix in memstreams)
-
loadmodule('font-otc.lua')
-- type one code
@@ -277,7 +261,7 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
loadmodule('font-def.lua')
loadmodule('font-xtx.lua') -- xetex compatible specifiers (plain/latex only)
loadmodule('luatex-fonts-ext.lua') -- some extensions
- -- loadmodule('luatex-fonts-lig.lua') -- and another one
+ loadmodule('luatex-fonts-lig.lua') -- and another one
-- We need to plug into a callback and the following module implements the handlers. Actual
-- plugging in happens later.