author     Marius <mariausol@gmail.com>    2011-04-13 10:40:15 +0300
committer  Marius <mariausol@gmail.com>    2011-04-13 10:40:15 +0300
commit     930f95164ea82514ff24bf71c6baddd40a569766 (patch)
tree       afb4207c4831a30390a0e2f2c76354dd583da27f
parent     2721c3d0c46d65ee01f935ddd959abdd53212377 (diff)
download   context-930f95164ea82514ff24bf71c6baddd40a569766.tar.gz
beta 2011.04.13 09:23
-rw-r--r--  scripts/context/lua/mtxrun.lua                   10
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua           10
-rw-r--r--  scripts/context/stubs/unix/mtxrun                10
-rw-r--r--  tex/context/base/back-exp.lua                    96
-rw-r--r--  tex/context/base/back-exp.mkiv                    2
-rw-r--r--  tex/context/base/cont-new.mkii                    2
-rw-r--r--  tex/context/base/cont-new.mkiv                    2
-rw-r--r--  tex/context/base/context.mkii                     2
-rw-r--r--  tex/context/base/context.mkiv                     2
-rw-r--r--  tex/context/base/core-con.mkiv                    2
-rw-r--r--  tex/context/base/data-tmp.lua                     2
-rw-r--r--  tex/context/base/font-ini.mkiv                    2
-rw-r--r--  tex/context/base/font-mis.lua                     2
-rw-r--r--  tex/context/base/font-ota.lua                     9
-rw-r--r--  tex/context/base/font-otd.lua                     9
-rw-r--r--  tex/context/base/font-otf.lua                    95
-rw-r--r--  tex/context/base/font-otn.lua                   389
-rw-r--r--  tex/context/base/font-otp.lua                    24
-rw-r--r--  tex/context/base/lpdf-epd.lua                     5
-rw-r--r--  tex/context/base/node-ini.lua                     2
-rw-r--r--  tex/context/base/s-abr-01.tex                     1
-rw-r--r--  tex/context/base/scrn-pag.mkvi                  125
-rw-r--r--  tex/context/base/status-files.pdf               bin 23581 -> 23665 bytes
-rw-r--r--  tex/context/base/status-lua.pdf                 bin 154910 -> 154918 bytes
-rw-r--r--  tex/context/base/trac-set.lua                     6
-rw-r--r--  tex/context/base/util-deb.lua                     2
-rw-r--r--  tex/generic/context/luatex-fonts-merged.lua    1118
-rw-r--r--  tex/generic/context/luatex-fonts.lua              1
28 files changed, 1505 insertions, 425 deletions
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 9ee6ed0e4..9ce73a168 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -4547,7 +4547,7 @@ function inspect(i) -- global function
if ti == "table" then
table.print(i,"table")
elseif is_node and is_node(i) then
- print(node.sequenced(i))
+ table.print(nodes.astable(i),tostring(i))
else
print(tostring(i))
end
@@ -4842,7 +4842,7 @@ local function set(t,what,newvalue)
else
value = is_boolean(value,value)
end
- w = escapedpattern(w,true)
+ w = "^" .. escapedpattern(w,true) .. "$" -- new: anchored
for name, functions in next, data do
if done[name] then
-- prevent recursion due to wildcards
@@ -5057,11 +5057,11 @@ local flags = environment and environment.engineflags
if flags then
if trackers and flags.trackers then
- setters.initialize("flags","trackers", utilities.parsers.settings_to_hash(flags.trackers))
+ setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
-- t_enable(flags.trackers)
end
if directives and flags.directives then
- setters.initialize("flags","directives", utilities.parsers.settings_to_hash(flags.directives))
+ setters.initialize("flags","directives", settings_to_hash(flags.directives))
-- d_enable(flags.directives)
end
end
@@ -10724,7 +10724,7 @@ function caches.is_writable(filepath,filename)
return file.is_writable(tmaname)
end
-local saveoptions = { reduce = true }
+local saveoptions = { compact = true }
function caches.savedata(filepath,filename,data,raw)
local tmaname, tmcname = caches.setluanames(filepath,filename)
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index 9ee6ed0e4..9ce73a168 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -4547,7 +4547,7 @@ function inspect(i) -- global function
if ti == "table" then
table.print(i,"table")
elseif is_node and is_node(i) then
- print(node.sequenced(i))
+ table.print(nodes.astable(i),tostring(i))
else
print(tostring(i))
end
@@ -4842,7 +4842,7 @@ local function set(t,what,newvalue)
else
value = is_boolean(value,value)
end
- w = escapedpattern(w,true)
+ w = "^" .. escapedpattern(w,true) .. "$" -- new: anchored
for name, functions in next, data do
if done[name] then
-- prevent recursion due to wildcards
@@ -5057,11 +5057,11 @@ local flags = environment and environment.engineflags
if flags then
if trackers and flags.trackers then
- setters.initialize("flags","trackers", utilities.parsers.settings_to_hash(flags.trackers))
+ setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
-- t_enable(flags.trackers)
end
if directives and flags.directives then
- setters.initialize("flags","directives", utilities.parsers.settings_to_hash(flags.directives))
+ setters.initialize("flags","directives", settings_to_hash(flags.directives))
-- d_enable(flags.directives)
end
end
@@ -10724,7 +10724,7 @@ function caches.is_writable(filepath,filename)
return file.is_writable(tmaname)
end
-local saveoptions = { reduce = true }
+local saveoptions = { compact = true }
function caches.savedata(filepath,filename,data,raw)
local tmaname, tmcname = caches.setluanames(filepath,filename)
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index 9ee6ed0e4..9ce73a168 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -4547,7 +4547,7 @@ function inspect(i) -- global function
if ti == "table" then
table.print(i,"table")
elseif is_node and is_node(i) then
- print(node.sequenced(i))
+ table.print(nodes.astable(i),tostring(i))
else
print(tostring(i))
end
@@ -4842,7 +4842,7 @@ local function set(t,what,newvalue)
else
value = is_boolean(value,value)
end
- w = escapedpattern(w,true)
+ w = "^" .. escapedpattern(w,true) .. "$" -- new: anchored
for name, functions in next, data do
if done[name] then
-- prevent recursion due to wildcards
@@ -5057,11 +5057,11 @@ local flags = environment and environment.engineflags
if flags then
if trackers and flags.trackers then
- setters.initialize("flags","trackers", utilities.parsers.settings_to_hash(flags.trackers))
+ setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
-- t_enable(flags.trackers)
end
if directives and flags.directives then
- setters.initialize("flags","directives", utilities.parsers.settings_to_hash(flags.directives))
+ setters.initialize("flags","directives", settings_to_hash(flags.directives))
-- d_enable(flags.directives)
end
end
@@ -10724,7 +10724,7 @@ function caches.is_writable(filepath,filename)
return file.is_writable(tmaname)
end
-local saveoptions = { reduce = true }
+local saveoptions = { compact = true }
function caches.savedata(filepath,filename,data,raw)
local tmaname, tmcname = caches.setluanames(filepath,filename)
diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua
index 8450b36f1..0d5b7cede 100644
--- a/tex/context/base/back-exp.lua
+++ b/tex/context/base/back-exp.lua
@@ -18,6 +18,8 @@ if not modules then modules = { } end modules ['back-exp'] = {
-- todo: less attributes e.g. internal only first node
-- todo: build xml tree in mem (handy for cleaning)
+-- delimited: left/right string (needs marking)
+
local nodecodes = nodes.nodecodes
local traverse_nodes = node.traverse
local hlist_code = nodecodes.hlist
@@ -59,6 +61,8 @@ local nodes = nodes
local attributes = attributes
local variables = interfaces.variables
+local settings_to_array = utilities.parsers.settings_to_array
+
local setmetatableindex = table.setmetatableindex
local tasks = nodes.tasks
local fontchar = fonts.hashes.characters
@@ -136,22 +140,59 @@ local extras = { }
local nofbreaks = 0
local used = { }
local exporting = false
-
-setmetatableindex(used, function(t,k) if k then local v = { } t[k] = v return v end end)
-
local last = nil
local lastpar = nil
-local joiner_1 = " "
-local joiner_2 = " " -- todo: test if this one can always be ""
-local joiner_3 = " "
-local joiner_4 = " "
-local joiner_5 = " "
-local joiner_6 = " "
-local joiner_7 = "\n"
-local joiner_8 = " "
-local joiner_9 = " "
-local joiner_0 = " "
+setmetatableindex(used, function(t,k)
+ if k then
+ local v = { }
+ t[k] = v
+ return v
+ end
+end)
+
+local joiner_1 = " "
+local joiner_2 = " " -- todo: test if this one can always be ""
+local joiner_3 = " "
+local joiner_4 = " "
+local joiner_5 = " "
+local joiner_6 = " "
+local joiner_7 = "\n"
+local joiner_8 = " "
+local joiner_9 = " "
+local joiner_0 = " "
+
+local namespaced = {
+ -- filled on
+}
+
+local namespaces = {
+ msubsup = "m",
+ msub = "m",
+ msup = "m",
+ mn = "m",
+ mi = "m",
+ ms = "m",
+ mo = "m",
+ mtext = "m",
+ mrow = "m",
+ mfrac = "m",
+ mroot = "m",
+ msqrt = "m",
+ munderover = "m",
+ munder = "m",
+ mover = "m",
+ merror = "m",
+ math = "m",
+ mrow = "m",
+}
+
+setmetatableindex(namespaced, function(t,k)
+ local namespace = namespaces[k]
+ local v = namespace and namespace .. ":" .. k or k
+ t[k] = v
+ return v
+end)
-- local P, C, Cc = lpeg.P, lpeg.C, lpeg.Cc
--
@@ -218,6 +259,7 @@ function extras.document(handle,element,detail,n,fulltag,hash)
handle:write(format(" date=%q",os.date()))
handle:write(format(" context=%q",environment.version))
handle:write(format(" version=%q",version))
+ handle:write(format(" xmlns:m=%q","http://www.w3.org/1998/Math/MathML"))
local identity = interactions.general.getidentity()
for i=1,#fields do
local key = fields[i]
@@ -588,7 +630,7 @@ function extras.tabulatecell(handle,element,detail,n,fulltag,di)
end
local function emptytag(handle,element,nature,depth)
- handle:write("\n",spaces[depth],"<",element,"/>\n")
+ handle:write("\n",spaces[depth],"<",namespaced[element],"/>\n")
end
local function begintag(handle,element,nature,depth,di,empty)
@@ -614,7 +656,7 @@ local function begintag(handle,element,nature,depth,di,empty)
linedone = false
end
end
- handle:write("<",element)
+ handle:write("<",namespaced[element])
if detail then
handle:write(" detail='",detail,"'")
end
@@ -651,14 +693,14 @@ local function endtag(handle,element,nature,depth,empty)
if not linedone then
handle:write("\n")
end
- handle:write(spaces[depth],"</",element,">\n")
+ handle:write(spaces[depth],"</",namespaced[element],">\n")
end
linedone = true
else
if empty then
handle:write("/>")
else
- handle:write("</",element,">")
+ handle:write("</",namespaced[element],">")
end
end
else
@@ -666,7 +708,7 @@ local function endtag(handle,element,nature,depth,empty)
if empty then
handle:write("/>")
else
- handle:write("</",element,">")
+ handle:write("</",namespaced[element],">")
end
linedone = false
end
@@ -1025,13 +1067,17 @@ local function stopexport(v)
report_export("saving xml data in '%s",xmlfile)
handle:write(format(xmlpreamble,tex.jobname,os.date(),environment.version,version))
if cssfile then
- if type(v) ~= "string" or cssfile == variables.yes or cssfile == "" or cssfile == xmlfile then
- cssfile = file.replacesuffix(xmlfile,"css")
- else
- cssfile = file.addsuffix(cssfile,"css")
+ local cssfiles = settings_to_array(cssfile)
+ for i=1,#cssfiles do
+ local cssfile = cssfiles[i]
+ if type(cssfile) ~= "string" or cssfile == variables.yes or cssfile == "" or cssfile == xmlfile then
+ cssfile = file.replacesuffix(xmlfile,"css")
+ else
+ cssfile = file.addsuffix(cssfile,"css")
+ end
+ report_export("adding css reference '%s",cssfile)
+ handle:write(format(csspreamble,cssfile))
end
- report_export("adding css reference '%s",cssfile)
- handle:write(format(csspreamble,cssfile))
end
flushtree(handle,tree.data)
handle:close()
@@ -1104,7 +1150,7 @@ local function startexport(v)
end
end
-directives.register("backend.export",startexport)
+directives.register("backend.export",startexport) -- maybe .name
local function injectbreak()
flushresult(entry)
diff --git a/tex/context/base/back-exp.mkiv b/tex/context/base/back-exp.mkiv
index 2da163a7e..09eaf0109 100644
--- a/tex/context/base/back-exp.mkiv
+++ b/tex/context/base/back-exp.mkiv
@@ -116,7 +116,7 @@
\enabledirectives
[backend.export=\backendparameter\c!export,%
backend.export.xhtml=\backendparameter\c!xhtml,%
- backend.export.css=\backendparameter\c!css]}%
+ backend.export.css={\backendparameter\c!css}]}%
\to \everysetupbackend
\protect \endinput
diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii
index 7c6708b07..02109ffe8 100644
--- a/tex/context/base/cont-new.mkii
+++ b/tex/context/base/cont-new.mkii
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2011.04.11 18:55}
+\newcontextversion{2011.04.13 09:23}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 952be95dc..0953026d0 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2011.04.11 18:55}
+\newcontextversion{2011.04.13 09:23}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii
index 78ab41d79..5101aaa80 100644
--- a/tex/context/base/context.mkii
+++ b/tex/context/base/context.mkii
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2011.04.11 18:55}
+\edef\contextversion{2011.04.13 09:23}
%D For those who want to use this:
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 4028bc61b..070b2c3e8 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2011.04.11 18:55}
+\edef\contextversion{2011.04.13 09:23}
%D For those who want to use this:
diff --git a/tex/context/base/core-con.mkiv b/tex/context/base/core-con.mkiv
index 75f2b6acd..e43e4ecda 100644
--- a/tex/context/base/core-con.mkiv
+++ b/tex/context/base/core-con.mkiv
@@ -519,7 +519,7 @@
\normalmonth\@@dam\relax
\normalyear \@@day\relax
\fi
- \docurrentdate{#2}%
+ \docurrentdate[#2]%
\endgroup}
%D \macros
diff --git a/tex/context/base/data-tmp.lua b/tex/context/base/data-tmp.lua
index 55986d727..ec6f91e24 100644
--- a/tex/context/base/data-tmp.lua
+++ b/tex/context/base/data-tmp.lua
@@ -290,7 +290,7 @@ function caches.is_writable(filepath,filename)
return file.is_writable(tmaname)
end
-local saveoptions = { reduce = true }
+local saveoptions = { compact = true }
function caches.savedata(filepath,filename,data,raw)
local tmaname, tmcname = caches.setluanames(filepath,filename)
diff --git a/tex/context/base/font-ini.mkiv b/tex/context/base/font-ini.mkiv
index b179aae35..b0efeaee1 100644
--- a/tex/context/base/font-ini.mkiv
+++ b/tex/context/base/font-ini.mkiv
@@ -991,11 +991,13 @@
\def\dododefinefontsynonymnop[#1]%
{\let\@@ff@@features \undefined
\let\@@ff@@fallbacks\undefined
+ \let\@@ff@@goodies \undefined
\expandafter\dogetfontparameternop#1,]=,}
\def\dododefinefontsynonymyes[#1]%
{\let\@@ff@@features \undefined
\let\@@ff@@fallbacks\undefined
+ \let\@@ff@@goodies \undefined
\expandafter\dogetfontparameteryes#1,]=,}
\def\dogetfontparameternop#1=#2,%
diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua
index 3de1cd30d..6d67e70f6 100644
--- a/tex/context/base/font-mis.lua
+++ b/tex/context/base/font-mis.lua
@@ -22,7 +22,7 @@ local handlers = fonts.handlers
handlers.otf = handlers.otf or { }
local otf = handlers.otf
-otf.version = otf.version or 2.722
+otf.version = otf.version or 2.727
otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true)
function otf.loadcached(filename,format,sub)
diff --git a/tex/context/base/font-ota.lua b/tex/context/base/font-ota.lua
index cb41194ee..1bf736531 100644
--- a/tex/context/base/font-ota.lua
+++ b/tex/context/base/font-ota.lua
@@ -56,8 +56,6 @@ process features right.</p>
-- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
--- an example analyzer (should move to font-ota.lua)
-
local state = attributes.private('state')
function analyzers.setstate(head,font)
@@ -165,7 +163,8 @@ registerotffeature {
methods.latn = analyzers.setstate
--- this info eventually will go into char-def
+-- this info eventually will go into char-def and we will have a state
+-- table for generic then
local zwnj = 0x200C
local zwj = 0x200D
@@ -351,3 +350,7 @@ function methods.arab(head,font,attr) -- maybe make a special version with no tr
first, last = finish(first,last)
return head, done
end
+
+directives.register("otf.analyze.useunicodemarks",function(v)
+ analyzers.useunicodemarks = v
+end)
diff --git a/tex/context/base/font-otd.lua b/tex/context/base/font-otd.lua
index 84811f0e1..b22889217 100644
--- a/tex/context/base/font-otd.lua
+++ b/tex/context/base/font-otd.lua
@@ -77,6 +77,7 @@ function otf.setdynamics(font,attribute)
shared.features = { }
-- end of save
local set = constructors.checkedfeatures("otf",features)
+set.mode = "node" -- really needed
dsla = otf.setfeatures(tfmdata,set)
if trace_dynamics then
report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",contextnumbers[attribute],attribute,script,language,table.sequenced(set))
@@ -117,7 +118,7 @@ local resolved = { } -- we only resolve a font,script,language,attribute pair on
local wildcard = "*"
local default = "dflt"
-local function initialize(sequence,script,language,s_enabled,a_enabled,attr,dynamic)
+local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr,dynamic)
local features = sequence.features
if features then
for kind, scripts in next, features do
@@ -149,8 +150,8 @@ local function initialize(sequence,script,language,s_enabled,a_enabled,attr,dyna
if trace_applied then
local typ, action = match(sequence.type,"(.*)_(.*)") -- brrr
report_process(
- "%s font: %03i, dynamic: %03i, kind: %s, lookup: %3i, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
- (valid and "+") or "-",font,attr or 0,kind,s,script,language,what,typ,action,sequence.name)
+ "%s font: %03i, dynamic: %03i, kind: %s, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
+ (valid and "+") or "-",font,attr or 0,kind,script,language,what,typ,action,sequence.name)
end
return { valid, attribute, sequence.chain or 0, kind }
end
@@ -207,7 +208,7 @@ function otf.dataset(tfmdata,sequences,font,attr)
ra = { }
rl[attr] = ra
setmetatableindex(ra, function(t,k)
- local v = initialize(sequences[k],script,language,s_enabled,a_enabled,attr,dynamic)
+ local v = initialize(sequences[k],script,language,s_enabled,a_enabled,font,attr,dynamic)
t[k] = v
return v
end)
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index e66e3c01b..8faa88b64 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -22,7 +22,7 @@ local getn = table.getn
local lpegmatch = lpeg.match
local reversed, concat, remove = table.reversed, table.concat, table.remove
local ioflush = io.flush
-local fastcopy = table.fastcopy
+local fastcopy, tohash = table.fastcopy, table.tohash
local allocate = utilities.storage.allocate
local registertracker = trackers.register
@@ -47,7 +47,7 @@ local otf = fonts.handlers.otf
otf.glists = { "gsub", "gpos" }
-otf.version = 2.722 -- beware: also sync font-mis.lua
+otf.version = 2.727 -- beware: also sync font-mis.lua
otf.cache = containers.define("fonts", "otf", otf.version, true)
local fontdata = fonts.hashes.identifiers
@@ -1017,6 +1017,11 @@ actions["prepare lookups"] = function(data,filename,raw)
end
end
+-- The reverse handler does a bit redundant splitting but it's seldom
+-- seen so we don' tbother too much. We could store the replacement
+-- in the current list (value instead of true) but it makes other code
+-- uglier. Maybe some day.
+
local function t_uncover(splitter,cache,covers)
local result = { }
for n=1,#covers do
@@ -1031,6 +1036,26 @@ local function t_uncover(splitter,cache,covers)
return result
end
+local function t_hashed(t,cache)
+ if t then
+ local h = { }
+ for i=1,#t do
+ local ti = t[i]
+ local h = cache[ti]
+ if not h then
+ h = { }
+ for i=1,#ti do
+ h[ti] = true
+ end
+ end
+ cache[ti] = h
+ end
+ return h
+ else
+ return nil
+ end
+end
+
local function s_uncover(splitter,cache,cover)
if cover == "" then
return nil
@@ -1038,17 +1063,44 @@ local function s_uncover(splitter,cache,cover)
local uncovered = cache[cover]
if not uncovered then
uncovered = lpegmatch(splitter,cover)
+ for i=1,#uncovered do
+ uncovered[i] = { [uncovered[i]] = true }
+ end
cache[cover] = uncovered
end
return uncovered
end
end
+local s_hashed = t_hashed
+
+local function r_uncover(splitter,cache,cover,replacements)
+ if cover == "" then
+ return nil
+ else
+ -- we always have current as { } even in the case of one
+ local uncovered = cover[1]
+ local replaced = cache[replacements]
+ if not replaced then
+ replaced = lpegmatch(splitter,replacements)
+ cache[replacements] = replaced
+ end
+ local nu, nr = #uncovered, #replaced
+ local r = { }
+ if nu == nr then
+ for i=1,nu do
+ r[uncovered[i]] = replaced[i]
+ end
+ end
+ return r
+ end
+end
+
actions["reorganize lookups"] = function(data,filename,raw)
-- we prefer the before lookups in a normal order
if data.lookups then
local splitter = data.helpers.tounicodetable
- local cache = { }
+ local cache, h_cache = { }, { }
for _, lookup in next, data.lookups do
local rules = lookup.rules
if rules then
@@ -1074,7 +1126,7 @@ actions["reorganize lookups"] = function(data,filename,raw)
for i=1,#before do
before[i] = before_class[before[i]] or { }
end
- rule.before = before
+ rule.before = t_hashed(before,h_cache)
end
local current = class.current
local lookups = rule.lookups
@@ -1085,14 +1137,14 @@ actions["reorganize lookups"] = function(data,filename,raw)
lookups[i] = false
end
end
- rule.current = current
+ rule.current = t_hashed(current,h_cache)
end
local after = class.after
if after then
for i=1,#after do
after[i] = after_class[after[i]] or { }
end
- rule.after = after
+ rule.after = t_hashed(after,h_cache)
end
rule.class = nil
end
@@ -1107,39 +1159,45 @@ actions["reorganize lookups"] = function(data,filename,raw)
if coverage then
local before = coverage.before
if before then
- rule.before = t_uncover(splitter,cache,reversed(before))
+ before = t_uncover(splitter,cache,reversed(before))
+ rule.before = t_hashed(before,h_cache)
end
local current = coverage.current
if current then
- rule.current = t_uncover(splitter,cache,current)
+ current = t_uncover(splitter,cache,current)
+ rule.current = t_hashed(current,h_cache)
end
local after = coverage.after
if after then
- rule.after = t_uncover(splitter,cache,after)
+ after = t_uncover(splitter,cache,after)
+ rule.after = t_hashed(after,h_cache)
end
rule.coverage = nil
end
end
- elseif format == "reversecoverage" then
+ elseif format == "reversecoverage" then -- special case, single substitution only
for i=1,#rules do
local rule = rules[i]
local reversecoverage = rule.reversecoverage
if reversecoverage then
local before = reversecoverage.before
if before then
- rule.before = t_uncover(splitter,cache,reversed(before))
+ before = t_uncover(splitter,cache,reversed(before))
+ rule.before = t_hashed(before,h_cache)
end
local current = reversecoverage.current
if current then
- rule.current = t_uncover(splitter,cache,current)
+ current = t_uncover(splitter,cache,current)
+ rule.current = t_hashed(current,h_cache)
end
local after = reversecoverage.after
if after then
- rule.after = t_uncover(splitter,cache,after)
+ after = t_uncover(splitter,cache,after)
+ rule.after = t_hashed(after,h_cache)
end
local replacements = reversecoverage.replacements
if replacements then
- rule.replacements = s_uncover(splitter,cache,replacements)
+ rule.replacements = r_uncover(splitter,cache,current,replacements)
end
rule.reversecoverage = nil
end
@@ -1151,15 +1209,18 @@ actions["reorganize lookups"] = function(data,filename,raw)
if glyphs then
local fore = glyphs.fore
if fore then
- rule.fore = s_uncover(splitter,cache,fore)
+ fore = s_uncover(splitter,cache,fore)
+ rule.before = s_hashed(fore,h_cache)
end
local back = glyphs.back
if back then
- rule.back = s_uncover(splitter,cache,back)
+ back = s_uncover(splitter,cache,back)
+ rule.after = s_hashed(back,h_cache)
end
local names = glyphs.names
if names then
- rule.names = s_uncover(splitter,cache,names)
+ names = s_uncover(splitter,cache,names)
+ rule.current = s_hashed(names,h_cache)
end
rule.glyphs = nil
end
diff --git a/tex/context/base/font-otn.lua b/tex/context/base/font-otn.lua
index 17c1a92e9..81b3fd267 100644
--- a/tex/context/base/font-otn.lua
+++ b/tex/context/base/font-otn.lua
@@ -259,9 +259,9 @@ local function gref(n)
local description = descriptions[n]
local name = description and description.name
if name then
- return format("U+%04X (%s)",n,name)
+ return format("U+%05X (%s)",n,name)
else
- return format("U+%04X",n)
+ return format("U+%05X",n)
end
elseif not n then
return "<error in tracing>"
@@ -269,9 +269,9 @@ local function gref(n)
local num, nam = { }, { }
for i=1,#n do
local ni = n[i]
- if tonumber(di) then -- later we will start at 2
+ if tonumber(ni) then -- later we will start at 2
local di = descriptions[ni]
- num[i] = format("U+%04X",ni)
+ num[i] = format("U+%05X",ni)
nam[i] = di and di.name or "?"
end
end
@@ -444,6 +444,9 @@ local function multiple_glyphs(start,multiple)
end
return start, true
else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(start.char))
+ end
return start, false
end
end
@@ -958,11 +961,10 @@ as less as needed but that would also mke the code even more messy.</p>
local function delete_till_stop(start,stop,ignoremarks)
if start ~= stop then
-- todo keep marks
- local done = false
- while not done do
- done = start == stop
- delete_node(start,start.next)
- end
+ repeat
+ local next = start.next
+ delete_node(start,next)
+ until next == stop
end
end
@@ -973,18 +975,19 @@ match.</p>
function chainprocs.gsub_single(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
-- todo: marks ?
- if not chainindex then
- delete_till_stop(start,stop) -- ,currentlookup.flags[1]
- end
+--~ if not chainindex then
+--~ delete_till_stop(start,stop) -- ,currentlookup.flags[1]
+--~ stop = start
+--~ end
local current = start
local subtables = currentlookup.subtables
-if #subtables > 1 then
- log_warning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
-end
+ if #subtables > 1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
while current do
if current.id == glyph_code then
local currentchar = current.char
- local lookupname = subtables[1]
+ local lookupname = subtables[1] -- only 1
local replacement = lookuphash[lookupname]
if not replacement then
if trace_bugs then
@@ -1548,7 +1551,8 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
else
-- todo: better space check (maybe check for glue)
local f, l = ck[4], ck[5]
- if f == l then
+ -- current match
+ if f == 1 and f == l then
-- already a hit
match = true
else
@@ -1600,8 +1604,8 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
end
-- end
end
+ -- before
if match and f > 1 then
- -- before
local prev = start.prev
if prev then
local n = f-1
@@ -1638,7 +1642,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
match = false break
end
prev = prev.prev
- elseif seq[n][32] then
+ elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
n = n -1
else
match = false break
@@ -1654,9 +1658,9 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
end
end
end
+ -- after
if match and s > l then
- -- after
- local current = last.next
+ local current = last and last.next
if current then
-- removed optimization for s-l == 1, we have to deal with marks anyway
local n = l + 1
@@ -1716,9 +1720,11 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
local char = start.char
if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s",cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
end
end
local chainlookups = ck[6]
@@ -1773,7 +1779,6 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
end
start = start.next
until i > nofchainlookups
-
end
else
local replacements = ck[7]
@@ -1952,6 +1957,8 @@ local function featuresprocessor(head,font,attr)
featurevalue = dataset and dataset[1] -- todo: pass to function instead of using a global
if featurevalue then
local attribute, chain, typ, subtables = dataset[2], dataset[3], sequence.type, sequence.subtables
+--~ print(typ)
+--~ table.print(table.keys(sequence))
if chain < 0 then
-- this is a limited case, no special treatments like 'init' etc
local handler = handlers[typ]
@@ -2328,13 +2335,177 @@ local function split(replacement,original)
return result
end
-local function uncover(covers,result) -- will change (we can store this in the raw table)
- local nofresults = #result
- for n=1,#covers do
- nofresults = nofresults + 1
- result[nofresults] = covers[n]
- end
-end
+-- not shared as we hook into lookups now
+
+--~ local function uncover_1(covers,result) -- multiple covers
+--~ local nofresults = #result
+--~ for n=1,#covers do
+--~ nofresults = nofresults + 1
+--~ local u = { }
+--~ local c = covers[n]
+--~ for i=1,#c do
+--~ u[c[i]] = true
+--~ end
+--~ result[nofresults] = u
+--~ end
+--~ end
+
+--~ local function uncover_2(covers,result) -- single covers (turned into multiple with n=1)
+--~ local nofresults = #result
+--~ for n=1,#covers do
+--~ nofresults = nofresults + 1
+--~ result[nofresults] = { [covers[n]] = true }
+--~ end
+--~ end
+
+--~ local function uncover_1(covers,result) -- multiple covers
+--~ local nofresults = #result
+--~ for n=1,#covers do
+--~ nofresults = nofresults + 1
+--~ result[nofresults] = covers[n]
+--~ end
+--~ end
+
+--~ local function prepare_contextchains(tfmdata)
+--~ local rawdata = tfmdata.shared.rawdata
+--~ local resources = rawdata.resources
+--~ local lookuphash = resources.lookuphash
+--~ local lookups = rawdata.lookups
+--~ if lookups then
+--~ for lookupname, lookupdata in next, rawdata.lookups do
+--~ local lookuptype = lookupdata.type
+--~ if not lookuptype then
+--~ report_prepare("missing lookuptype for %s",lookupname)
+--~ else -- => lookuphash[lookupname][unicode]
+--~ local rules = lookupdata.rules
+--~ if rules then
+--~ local fmt = lookupdata.format
+--~ -- if fmt == "coverage" then
+--~ if fmt == "coverage" or fmt == "glyphs" then
+--~ if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
+--~ -- todo: dejavu-serif has one (but i need to see what use it has)
+--~ report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
+--~ else
+--~ local contexts = lookuphash[lookupname]
+--~ if not contexts then
+--~ contexts = { }
+--~ lookuphash[lookupname] = contexts
+--~ end
+--~ local t, nt = { }, 0
+--~ for nofrules=1,#rules do -- does #rules>1 happen often?
+--~ local rule = rules[nofrules]
+--~ local current = rule.current
+--~ local before = rule.before
+--~ local after = rule.after
+--~ local sequence = { }
+--~ if before then
+--~ uncover_1(before,sequence)
+--~ end
+--~ local start = #sequence + 1
+--~ uncover_1(current,sequence)
+--~ local stop = #sequence
+--~ if after then
+--~ uncover_1(after,sequence)
+--~ end
+--~ if sequence[1] then
+--~ nt = nt + 1
+--~ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
+--~ for unic, _ in next, sequence[start] do
+--~ local cu = contexts[unic]
+--~ if not cu then
+--~ contexts[unic] = t
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ elseif fmt == "reversecoverage" then -- we could combine both branches (only difference is replacements)
+--~ if lookuptype ~= "reversesub" then
+--~ report_prepare("unsupported reverse coverage %s for %s",lookuptype,lookupname)
+--~ else
+--~ local contexts = lookuphash[lookupname]
+--~ if not contexts then
+--~ contexts = { }
+--~ lookuphash[lookupname] = contexts
+--~ end
+--~ local t, nt = { }, 0
+--~ for nofrules=1,#rules do
+--~ local rule = rules[nofrules]
+--~ local current = rule.current
+--~ local before = rule.before
+--~ local after = rule.after
+--~ local replacements = rule.replacements
+--~ local sequence = { }
+--~ if before then
+--~ uncover_1(before,sequence)
+--~ end
+--~ local start = #sequence + 1
+--~ uncover_1(current,sequence)
+--~ local stop = #sequence
+--~ if after then
+--~ uncover_1(after,sequence)
+--~ end
+--~ if sequence[1] then
+--~ nt = nt + 1
+--~ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
+--~ for unic, _ in next, sequence[start] do
+--~ local cu = contexts[unic]
+--~ if not cu then
+--~ contexts[unic] = t
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ -- elseif fmt == "glyphs" then --maybe just make then before = { fore } and share with coverage
+--~ -- if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
+--~ -- report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
+--~ -- else
+--~ -- local contexts = lookuphash[lookupname]
+--~ -- if not contexts then
+--~ -- contexts = { }
+--~ -- lookuphash[lookupname] = contexts
+--~ -- end
+--~ -- local t, nt = { }, 0
+--~ -- for nofrules=1,#rules do -- we can make glyphs a special case (less tables)
+--~ -- local rule = rules[nofrules]
+--~ -- local current = rule.names
+--~ -- local before = rule.fore
+--~ -- local after = rule.back
+--~ -- local sequence = { }
+--~ -- if before then
+--~ -- uncover_1(before,sequence)
+--~ -- end
+--~ -- local start = #sequence + 1
+--~ -- uncover_1(current,sequence)
+--~ -- local stop = #sequence
+--~ -- if after then
+--~ -- uncover_1(after,sequence)
+--~ -- end
+--~ -- if sequence then
+--~ -- nt = nt + 1
+--~ -- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
+--~ -- for unic, _ in next, sequence[start] do
+--~ -- local cu = contexts[unic]
+--~ -- if not cu then
+--~ -- contexts[unic] = t
+--~ -- end
+--~ -- end
+--~ -- end
+--~ -- end
+--~ -- end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+
+local valid = {
+ coverage = { chainsub = true, chainpos = true },
+ reversecoverage = { reversesub = true },
+ glyphs = { chainsub = true, chainpos = true },
+}
local function prepare_contextchains(tfmdata)
local rawdata = tfmdata.shared.rawdata
@@ -2344,122 +2515,72 @@ local function prepare_contextchains(tfmdata)
if lookups then
for lookupname, lookupdata in next, rawdata.lookups do
local lookuptype = lookupdata.type
- if not lookuptype then
- report_prepare("missing lookuptype for %s",lookupname)
- else
+ if lookuptype then
local rules = lookupdata.rules
if rules then
- local fmt = lookupdata.format
- -- lookuphash[lookupname][unicode]
- if fmt == "coverage" then -- or fmt == "class" (converted into "coverage")
- if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- -- todo: dejavu-serif has one (but i need to see what use it has)
- report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do -- does #rules>1 happen often?
- local rule = rules[nofrules]
- local current, before, after, sequence = rule.current, rule.before, rule.after, { }
- if before then
- uncover(before,sequence)
- end
- local start = #sequence + 1
- uncover(current,sequence)
- local stop = #sequence
- if after then
- uncover(after,sequence)
- end
- if sequence[1] then
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
+ local format = lookupdata.format
+ local validformat = valid[format]
+ if not validformat then
+ report_prepare("unsupported format %s",format)
+ elseif not validformat[lookuptype] then
+ -- todo: dejavu-serif has one (but i need to see what use it has)
+ report_prepare("unsupported %s %s for %s",format,lookuptype,lookupname)
+ else
+ local contexts = lookuphash[lookupname]
+ if not contexts then
+ contexts = { }
+ lookuphash[lookupname] = contexts
end
- elseif fmt == "reversecoverage" then
- if lookuptype ~= "reversesub" then
- report_prepare("unsupported reverse coverage %s for %s",lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current, before, after, replacements, sequence = rule.current, rule.before, rule.after, rule.replacements, { }
- if before then
- uncover(before,sequence)
- end
- local start = #sequence + 1
- uncover(current,sequence)
- local stop = #sequence
- if after then
- uncover(after,sequence)
- end
- if replacements then
- replacements = split(replacements,current[1])
- end
- if sequence[1] then
- -- this is different from normal coverage, we assume only replacements
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
+ local t, nt = { }, 0
+ for nofrules=1,#rules do
+ local rule = rules[nofrules]
+ local current = rule.current
+ local before = rule.before
+ local after = rule.after
+ local replacements = rule.replacements
+ local sequence = { }
+ local nofsequences = 0
+ -- Eventually we can store start, stop and sequence in the cached file
+ -- but then less sharing takes place so best not do that without a lot
+ -- of profiling so let's forget about it.
+ if before then
+ for n=1,#before do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = before[n]
end
end
- end
- elseif fmt == "glyphs" then --maybe just make then before = { fore } and share with coverage
- if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
+ local start = nofsequences + 1
+ for n=1,#current do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = current[n]
end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current, before, after, sequence = rule.names, rule.fore, rule.back, { }
- if before then
- uncover(before,sequence)
- end
- local start = #sequence + 1
- uncover(current,sequence)
- local stop = #sequence
- if after then
- uncover(after,sequence)
+ local stop = nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = after[n]
end
- if sequence[1] then
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
+ end
+ if sequence[1] then
+ -- Replacements only happen with reverse lookups as they are single only. We
+ -- could pack them into current (replacement value instead of true) and then
+ -- use sequence[start] instead but it's somewhat ugly.
+ nt = nt + 1
+ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
+ for unic, _ in next, sequence[start] do
+ local cu = contexts[unic]
+ if not cu then
+ contexts[unic] = t
end
end
end
end
end
+ else
+ -- no rules
end
+ else
+ report_prepare("missing lookuptype for %s",lookupname)
end
end
end
diff --git a/tex/context/base/font-otp.lua b/tex/context/base/font-otp.lua
index 55ddd539e..f019ade7f 100644
--- a/tex/context/base/font-otp.lua
+++ b/tex/context/base/font-otp.lua
@@ -202,14 +202,22 @@ local function packdata(data)
if rules then
for i=1,#rules do -- was next loop
local rule = rules[i]
- local r = rule.before if r then for i=1,#r do r[i] = pack(r[i],true) end end
- local r = rule.after if r then for i=1,#r do r[i] = pack(r[i],true) end end
- local r = rule.current if r then for i=1,#r do r[i] = pack(r[i],true) end end
- local r = rule.replacements if r then rule.replacements = pack(r, true) end
- local r = rule.fore if r then rule.fore = pack(r, true) end
- local r = rule.back if r then rule.back = pack(r, true) end
- local r = rule.names if r then rule.names = pack(r, true) end
- local r = rule.lookups if r then rule.lookups = pack(r) end
+--~ local r = rule.before if r then for i=1,#r do r[i] = pack(r[i],true) end end
+--~ local r = rule.after if r then for i=1,#r do r[i] = pack(r[i],true) end end
+--~ local r = rule.current if r then for i=1,#r do r[i] = pack(r[i],true) end end
+--~ local r = rule.replacements if r then rule.replacements = pack(r, true) end
+--~ local r = rule.fore if r then rule.fore = pack(r, true) end
+--~ local r = rule.back if r then rule.back = pack(r, true) end
+--~ local r = rule.names if r then rule.names = pack(r, true) end
+--~ local r = rule.lookups if r then rule.lookups = pack(r) end
+ local r = rule.before if r then for i=1,#r do r[i] = pack(r[i]) end end
+ local r = rule.after if r then for i=1,#r do r[i] = pack(r[i]) end end
+ local r = rule.current if r then for i=1,#r do r[i] = pack(r[i]) end end
+ local r = rule.replacements if r then rule.replacements = pack(r) end
+ -- local r = rule.fore if r then rule.fore = pack(r) end
+ -- local r = rule.back if r then rule.back = pack(r) end
+ -- local r = rule.names if r then rule.names = pack(r) end
+ local r = rule.lookups if r then rule.lookups = pack(r) end
end
end
end
diff --git a/tex/context/base/lpdf-epd.lua b/tex/context/base/lpdf-epd.lua
index ae84a29c7..7cd46f962 100644
--- a/tex/context/base/lpdf-epd.lua
+++ b/tex/context/base/lpdf-epd.lua
@@ -26,6 +26,8 @@ local setmetatable, rawset = setmetatable, rawset
-- add accessor methods to the resource dict
-- a function to mark objects as to be included
+lpdf = lpdf or { }
+
local lpdf = lpdf
-- -- -- helpers -- -- --
@@ -225,6 +227,9 @@ local catalog_access = {
}
-- rawset(t,k,p)
return p
+ else
+ print(c:dictLookup(k))
+--~ return checked_access(t,k,t:dictLookup(k))
end
end
}
diff --git a/tex/context/base/node-ini.lua b/tex/context/base/node-ini.lua
index 474edfc1d..eb70fa6e6 100644
--- a/tex/context/base/node-ini.lua
+++ b/tex/context/base/node-ini.lua
@@ -211,8 +211,6 @@ function nodes.showcodes()
end
end
--- pseudoline and shape crash on node.new
-
local whatsit_node = nodecodes.whatsit
local messyhack = table.tohash { -- temporary solution
diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex
index dc5b0475b..45c5e4297 100644
--- a/tex/context/base/s-abr-01.tex
+++ b/tex/context/base/s-abr-01.tex
@@ -52,6 +52,7 @@
\logo [CID] {cid}
\logo [CJK] {cjk}
\logo [CMR] {cmr}
+\logo [CLD] {cld}
\logo [CMYK] {cmyk}
\logo [CODHOST] {CodHost}
\logo [CONTEXT] {\ConTeXt}
diff --git a/tex/context/base/scrn-pag.mkvi b/tex/context/base/scrn-pag.mkvi
index aa1bd6999..c982eb402 100644
--- a/tex/context/base/scrn-pag.mkvi
+++ b/tex/context/base/scrn-pag.mkvi
@@ -22,65 +22,52 @@
\installparameterhandler \??sc {interactionscreen}
\installsetuphandler \??sc {interactionscreen}
-\def\scrn_canvas_synchronize_simple % this will be done differently (or disappear)
- {\begingroup
- \ifx\@@ppleft \empty
- \ifx\@@ppright \empty
- \ifx\@@pptop \empty
- \ifx\@@ppbottom \empty
- \ifx\@@pcstate\v!start
- \locationfalse\fi\else
- \locationfalse\fi\else
- \locationfalse\fi\else
- \locationfalse\fi\else
- \locationfalse\fi
- \iflocation % without screen settings
- \ctxcommand{setupcanvas{
- paperwidth = \number\paperwidth,
- paperheight = \number\paperheight
- }}%
- \else
- \ctxcommand{setupcanvas{
- paperwidth = \number\printpaperwidth,
- paperheight = \number\printpaperheight
- }}%
- \fi
- \endgroup}
+\newdimen\canvaswidth
+\newdimen\canvasheight
+\newdimen\canvasbackoffset
+\newdimen\canvastopoffset
-\def\scrn_canvas_synchronize_complex
+\def\scrn_canvas_calculate
{\begingroup
\edef\currentinteractionscreenwidth {\interactionscreenparameter\c!width }%
\edef\currentinteractionscreenheight{\interactionscreenparameter\c!height}%
+ \canvasbackoffset\backspace
+ \canvastopoffset\topoffset
\ifx\currentinteractionscreenwidth\v!fit
- \!!widtha\leftcombitotal
- \ifdim\backspace>\!!widtha
+ \global\canvaswidth\leftcombitotal
+ \ifdim\backspace>\canvaswidth
\ifdim\backspace>\zeropoint\relax
- \advance\backspace -\!!widtha
+ \global\advance\canvasbackspace -\canvaswidth
\fi
\fi
- \advance\!!widtha\dimexpr
+ \global\advance\canvaswidth\dimexpr
\rightcombitotal
+ 2\dimexpr
\interactionscreenparameter\c!backspace
+ \interactionscreenparameter\c!horoffset
\relax
\relax
+ \donetrue
\else\ifx\currentinteractionscreenwidth\v!max
- \!!widtha\printpaperwidth
+ \global\canvaswidth\printpaperwidth
+ \donetrue
\else
- \!!widtha\currentinteractionscreenwidth
- \fi\fi
- \ifdim\!!widtha>\paperwidth\ifdim\!!widtha>\zeropoint
- \global\paperwidth\!!widtha
+ \global\canvaswidth\currentinteractionscreenwidth
+ \donefalse
\fi\fi
+ \ifdone
+ \ifdim\canvaswidth>\paperwidth\ifdim\canvaswidth>\zeropoint
+ \global\paperwidth\canvaswidth % kills location=middle
+ \fi\fi
+ \fi
\ifx\currentinteractionscreenheight\v!fit
- \!!heighta\dimexpr\topheight+\topdistance\relax
- \ifdim\topspace>\!!heighta
+ \global\canvasheight\dimexpr\topheight+\topdistance\relax
+ \ifdim\topspace>\canvasheight
\ifdim\topspace>\zeropoint\relax
- \advance\topspace -\!!heighta
+ \global\advance\canvastopspace -\canvasheight
\fi
\fi
- \advance\!!heighta\dimexpr
+ \global\advance\canvasheight\dimexpr
\makeupheight
+ \bottomdistance
+ \bottomheight
@@ -89,26 +76,64 @@
+ \interactionscreenparameter\c!veroffset
\relax
\relax
+ \donetrue
\else\ifx\currentinteractionscreenheight\v!max
- \!!heighta\printpaperheight
+ \global\canvasheight\printpaperheight
+ \donetrue
\else
- \!!heighta\currentinteractionscreenheight
- \fi\fi
- \ifdim\!!heighta>\paperheight\ifdim\!!heighta>\zeropoint
- \global\paperheight\!!heighta
+ \global\canvasheight\currentinteractionscreenheight
+ \donefalse
\fi\fi
- \ctxcommand{setupcanvas{
+ \ifdone
+ \ifdim\canvasheight>\paperheight\ifdim\canvasheight>\zeropoint
+ \global\paperheight\canvasheight % kills location=middle
+ \fi\fi
+ \fi
+ \endgroup}
+
+\appendtoks
+ \ifproductionrun
+ \scrn_canvas_calculate
+ \fi
+\to \everysetupinteractionscreen
+
+\def\scrn_canvas_synchronize_simple % this will be done differently (or disappear)
+ {\begingroup
+ \ifx\@@ppleft \empty
+ \ifx\@@ppright \empty
+ \ifx\@@pptop \empty
+ \ifx\@@ppbottom \empty
+ \ifx\@@pcstate\v!start
+ \locationfalse\fi\else
+ \locationfalse\fi\else
+ \locationfalse\fi\else
+ \locationfalse\fi\else
+ \locationfalse\fi
+ \iflocation % without screen settings
+ \ctxcommand{setupcanvas{
+ paperwidth = \number\paperwidth,
+ paperheight = \number\paperheight
+ }}%
+ \else
+ \ctxcommand{setupcanvas{
+ paperwidth = \number\printpaperwidth,
+ paperheight = \number\printpaperheight
+ }}%
+ \fi
+ \endgroup}
+
+\def\scrn_canvas_synchronize_complex
+ {\ctxcommand{setupcanvas{
mode = "\interactionscreenparameter\c!option",
singlesided = \ifsinglesided true\else false\fi,
doublesided = \ifdoublesided true\else false\fi,
- leftoffset = \number\dimexpr\backoffset\relax,
- topoffset = \number\dimexpr\topoffset \relax,
- width = \number\dimexpr\!!widtha \relax,
- height = \number\dimexpr\!!heighta \relax,
+ leftoffset = \number\dimexpr\canvasbackoffset\relax,
+ topoffset = \number\dimexpr\canvastopoffset\relax,
+ width = \number\dimexpr\canvaswidth\relax,
+ height = \number\dimexpr\canvasheight\relax,
paperwidth = \number\paperwidth,
paperheight = \number\paperheight
- }}%
- \endgroup}
+ }}}
\let\scrn_canvas_synchronize\scrn_canvas_synchronize_simple
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 1020d10ae..77031a786 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 98d73a459..405fea2c5 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/trac-set.lua b/tex/context/base/trac-set.lua
index 6eeb2f1d5..27b5f17f2 100644
--- a/tex/context/base/trac-set.lua
+++ b/tex/context/base/trac-set.lua
@@ -82,7 +82,7 @@ local function set(t,what,newvalue)
else
value = is_boolean(value,value)
end
- w = escapedpattern(w,true)
+ w = "^" .. escapedpattern(w,true) .. "$" -- new: anchored
for name, functions in next, data do
if done[name] then
-- prevent recursion due to wildcards
@@ -297,11 +297,11 @@ local flags = environment and environment.engineflags
if flags then
if trackers and flags.trackers then
- setters.initialize("flags","trackers", utilities.parsers.settings_to_hash(flags.trackers))
+ setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
-- t_enable(flags.trackers)
end
if directives and flags.directives then
- setters.initialize("flags","directives", utilities.parsers.settings_to_hash(flags.directives))
+ setters.initialize("flags","directives", settings_to_hash(flags.directives))
-- d_enable(flags.directives)
end
end
diff --git a/tex/context/base/util-deb.lua b/tex/context/base/util-deb.lua
index 5eabbc8c4..ce55de5c7 100644
--- a/tex/context/base/util-deb.lua
+++ b/tex/context/base/util-deb.lua
@@ -150,7 +150,7 @@ function inspect(i) -- global function
if ti == "table" then
table.print(i,"table")
elseif is_node and is_node(i) then
- print(node.sequenced(i))
+ table.print(nodes.astable(i),tostring(i))
else
print(tostring(i))
end
diff --git a/tex/generic/context/luatex-fonts-merged.lua b/tex/generic/context/luatex-fonts-merged.lua
index c12ed28e3..22117dff6 100644
--- a/tex/generic/context/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 04/11/11 18:55:38
+-- merge date : 04/13/11 09:23:15
do -- begin closure to overcome local limits and interference
@@ -4763,7 +4763,7 @@ local getn = table.getn
local lpegmatch = lpeg.match
local reversed, concat, remove = table.reversed, table.concat, table.remove
local ioflush = io.flush
-local fastcopy = table.fastcopy
+local fastcopy, tohash = table.fastcopy, table.tohash
local allocate = utilities.storage.allocate
local registertracker = trackers.register
@@ -4788,7 +4788,7 @@ local otf = fonts.handlers.otf
otf.glists = { "gsub", "gpos" }
-otf.version = 2.722 -- beware: also sync font-mis.lua
+otf.version = 2.727 -- beware: also sync font-mis.lua
otf.cache = containers.define("fonts", "otf", otf.version, true)
local fontdata = fonts.hashes.identifiers
@@ -5758,6 +5758,11 @@ actions["prepare lookups"] = function(data,filename,raw)
end
end
+-- The reverse handler does a bit redundant splitting but it's seldom
+-- seen so we don't bother too much. We could store the replacement
+-- in the current list (value instead of true) but it makes other code
+-- uglier. Maybe some day.
+
local function t_uncover(splitter,cache,covers)
local result = { }
for n=1,#covers do
@@ -5772,6 +5777,26 @@ local function t_uncover(splitter,cache,covers)
return result
end
+local function t_hashed(t,cache)
+ if t then
+ local h = { }
+ for i=1,#t do
+ local ti = t[i]
+ local h = cache[ti]
+ if not h then
+ h = { }
+ for i=1,#ti do
+ h[ti] = true
+ end
+ end
+ cache[ti] = h
+ end
+ return h
+ else
+ return nil
+ end
+end
+
local function s_uncover(splitter,cache,cover)
if cover == "" then
return nil
@@ -5779,17 +5804,44 @@ local function s_uncover(splitter,cache,cover)
local uncovered = cache[cover]
if not uncovered then
uncovered = lpegmatch(splitter,cover)
+ for i=1,#uncovered do
+ uncovered[i] = { [uncovered[i]] = true }
+ end
cache[cover] = uncovered
end
return uncovered
end
end
+local s_hashed = t_hashed
+
+local function r_uncover(splitter,cache,cover,replacements)
+ if cover == "" then
+ return nil
+ else
+ -- we always have current as { } even in the case of one
+ local uncovered = cover[1]
+ local replaced = cache[replacements]
+ if not replaced then
+ replaced = lpegmatch(splitter,replacements)
+ cache[replacements] = replaced
+ end
+ local nu, nr = #uncovered, #replaced
+ local r = { }
+ if nu == nr then
+ for i=1,nu do
+ r[uncovered[i]] = replaced[i]
+ end
+ end
+ return r
+ end
+end
+
actions["reorganize lookups"] = function(data,filename,raw)
-- we prefer the before lookups in a normal order
if data.lookups then
local splitter = data.helpers.tounicodetable
- local cache = { }
+ local cache, h_cache = { }, { }
for _, lookup in next, data.lookups do
local rules = lookup.rules
if rules then
@@ -5815,7 +5867,7 @@ actions["reorganize lookups"] = function(data,filename,raw)
for i=1,#before do
before[i] = before_class[before[i]] or { }
end
- rule.before = before
+ rule.before = t_hashed(before,h_cache)
end
local current = class.current
local lookups = rule.lookups
@@ -5826,14 +5878,14 @@ actions["reorganize lookups"] = function(data,filename,raw)
lookups[i] = false
end
end
- rule.current = current
+ rule.current = t_hashed(current,h_cache)
end
local after = class.after
if after then
for i=1,#after do
after[i] = after_class[after[i]] or { }
end
- rule.after = after
+ rule.after = t_hashed(after,h_cache)
end
rule.class = nil
end
@@ -5848,39 +5900,45 @@ actions["reorganize lookups"] = function(data,filename,raw)
if coverage then
local before = coverage.before
if before then
- rule.before = t_uncover(splitter,cache,reversed(before))
+ before = t_uncover(splitter,cache,reversed(before))
+ rule.before = t_hashed(before,h_cache)
end
local current = coverage.current
if current then
- rule.current = t_uncover(splitter,cache,current)
+ current = t_uncover(splitter,cache,current)
+ rule.current = t_hashed(current,h_cache)
end
local after = coverage.after
if after then
- rule.after = t_uncover(splitter,cache,after)
+ after = t_uncover(splitter,cache,after)
+ rule.after = t_hashed(after,h_cache)
end
rule.coverage = nil
end
end
- elseif format == "reversecoverage" then
+ elseif format == "reversecoverage" then -- special case, single substitution only
for i=1,#rules do
local rule = rules[i]
local reversecoverage = rule.reversecoverage
if reversecoverage then
local before = reversecoverage.before
if before then
- rule.before = t_uncover(splitter,cache,reversed(before))
+ before = t_uncover(splitter,cache,reversed(before))
+ rule.before = t_hashed(before,h_cache)
end
local current = reversecoverage.current
if current then
- rule.current = t_uncover(splitter,cache,current)
+ current = t_uncover(splitter,cache,current)
+ rule.current = t_hashed(current,h_cache)
end
local after = reversecoverage.after
if after then
- rule.after = t_uncover(splitter,cache,after)
+ after = t_uncover(splitter,cache,after)
+ rule.after = t_hashed(after,h_cache)
end
local replacements = reversecoverage.replacements
if replacements then
- rule.replacements = s_uncover(splitter,cache,replacements)
+ rule.replacements = r_uncover(splitter,cache,current,replacements)
end
rule.reversecoverage = nil
end
@@ -5892,15 +5950,18 @@ actions["reorganize lookups"] = function(data,filename,raw)
if glyphs then
local fore = glyphs.fore
if fore then
- rule.fore = s_uncover(splitter,cache,fore)
+ fore = s_uncover(splitter,cache,fore)
+ rule.before = s_hashed(fore,h_cache)
end
local back = glyphs.back
if back then
- rule.back = s_uncover(splitter,cache,back)
+ back = s_uncover(splitter,cache,back)
+ rule.after = s_hashed(back,h_cache)
end
local names = glyphs.names
if names then
- rule.names = s_uncover(splitter,cache,names)
+ names = s_uncover(splitter,cache,names)
+ rule.current = s_hashed(names,h_cache)
end
rule.glyphs = nil
end
@@ -7966,9 +8027,9 @@ local function gref(n)
local description = descriptions[n]
local name = description and description.name
if name then
- return format("U+%04X (%s)",n,name)
+ return format("U+%05X (%s)",n,name)
else
- return format("U+%04X",n)
+ return format("U+%05X",n)
end
elseif not n then
return "<error in tracing>"
@@ -7976,9 +8037,9 @@ local function gref(n)
local num, nam = { }, { }
for i=1,#n do
local ni = n[i]
- if tonumber(di) then -- later we will start at 2
+ if tonumber(ni) then -- later we will start at 2
local di = descriptions[ni]
- num[i] = format("U+%04X",ni)
+ num[i] = format("U+%05X",ni)
nam[i] = di and di.name or "?"
end
end
@@ -8151,6 +8212,9 @@ local function multiple_glyphs(start,multiple)
end
return start, true
else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(start.char))
+ end
return start, false
end
end
@@ -8665,11 +8729,10 @@ as little as needed but that would also make the code even more messy.</p>
local function delete_till_stop(start,stop,ignoremarks)
if start ~= stop then
-- todo keep marks
- local done = false
- while not done do
- done = start == stop
- delete_node(start,start.next)
- end
+ repeat
+ local next = start.next
+ delete_node(start,next)
+ until next == stop
end
end
@@ -8680,18 +8743,19 @@ match.</p>
function chainprocs.gsub_single(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
-- todo: marks ?
- if not chainindex then
- delete_till_stop(start,stop) -- ,currentlookup.flags[1]
- end
+--~ if not chainindex then
+--~ delete_till_stop(start,stop) -- ,currentlookup.flags[1]
+--~ stop = start
+--~ end
local current = start
local subtables = currentlookup.subtables
-if #subtables > 1 then
- log_warning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
-end
+ if #subtables > 1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
while current do
if current.id == glyph_code then
local currentchar = current.char
- local lookupname = subtables[1]
+ local lookupname = subtables[1] -- only 1
local replacement = lookuphash[lookupname]
if not replacement then
if trace_bugs then
@@ -9255,7 +9319,8 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
else
-- todo: better space check (maybe check for glue)
local f, l = ck[4], ck[5]
- if f == l then
+ -- current match
+ if f == 1 and f == l then
-- already a hit
match = true
else
@@ -9307,8 +9372,8 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
end
-- end
end
+ -- before
if match and f > 1 then
- -- before
local prev = start.prev
if prev then
local n = f-1
@@ -9345,7 +9410,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
match = false break
end
prev = prev.prev
- elseif seq[n][32] then
+                    elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
n = n -1
else
match = false break
@@ -9361,9 +9426,9 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
end
end
end
+ -- after
if match and s > l then
- -- after
- local current = last.next
+ local current = last and last.next
if current then
-- removed optimization for s-l == 1, we have to deal with marks anyway
local n = l + 1
@@ -9423,9 +9488,11 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
local char = start.char
if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s",cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
end
end
local chainlookups = ck[6]
@@ -9480,7 +9547,6 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
end
start = start.next
until i > nofchainlookups
-
end
else
local replacements = ck[7]
@@ -9659,6 +9725,8 @@ local function featuresprocessor(head,font,attr)
featurevalue = dataset and dataset[1] -- todo: pass to function instead of using a global
if featurevalue then
local attribute, chain, typ, subtables = dataset[2], dataset[3], sequence.type, sequence.subtables
+--~ print(typ)
+--~ table.print(table.keys(sequence))
if chain < 0 then
-- this is a limited case, no special treatments like 'init' etc
local handler = handlers[typ]
@@ -10035,13 +10103,177 @@ local function split(replacement,original)
return result
end
-local function uncover(covers,result) -- will change (we can store this in the raw table)
- local nofresults = #result
- for n=1,#covers do
- nofresults = nofresults + 1
- result[nofresults] = covers[n]
- end
-end
+-- not shared as we hook into lookups now
+
+--~ local function uncover_1(covers,result) -- multiple covers
+--~ local nofresults = #result
+--~ for n=1,#covers do
+--~ nofresults = nofresults + 1
+--~ local u = { }
+--~ local c = covers[n]
+--~ for i=1,#c do
+--~ u[c[i]] = true
+--~ end
+--~ result[nofresults] = u
+--~ end
+--~ end
+
+--~ local function uncover_2(covers,result) -- single covers (turned into multiple with n=1)
+--~ local nofresults = #result
+--~ for n=1,#covers do
+--~ nofresults = nofresults + 1
+--~ result[nofresults] = { [covers[n]] = true }
+--~ end
+--~ end
+
+--~ local function uncover_1(covers,result) -- multiple covers
+--~ local nofresults = #result
+--~ for n=1,#covers do
+--~ nofresults = nofresults + 1
+--~ result[nofresults] = covers[n]
+--~ end
+--~ end
+
+--~ local function prepare_contextchains(tfmdata)
+--~ local rawdata = tfmdata.shared.rawdata
+--~ local resources = rawdata.resources
+--~ local lookuphash = resources.lookuphash
+--~ local lookups = rawdata.lookups
+--~ if lookups then
+--~ for lookupname, lookupdata in next, rawdata.lookups do
+--~ local lookuptype = lookupdata.type
+--~ if not lookuptype then
+--~ report_prepare("missing lookuptype for %s",lookupname)
+--~ else -- => lookuphash[lookupname][unicode]
+--~ local rules = lookupdata.rules
+--~ if rules then
+--~ local fmt = lookupdata.format
+--~ -- if fmt == "coverage" then
+--~ if fmt == "coverage" or fmt == "glyphs" then
+--~ if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
+--~ -- todo: dejavu-serif has one (but i need to see what use it has)
+--~ report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
+--~ else
+--~ local contexts = lookuphash[lookupname]
+--~ if not contexts then
+--~ contexts = { }
+--~ lookuphash[lookupname] = contexts
+--~ end
+--~ local t, nt = { }, 0
+--~ for nofrules=1,#rules do -- does #rules>1 happen often?
+--~ local rule = rules[nofrules]
+--~ local current = rule.current
+--~ local before = rule.before
+--~ local after = rule.after
+--~ local sequence = { }
+--~ if before then
+--~ uncover_1(before,sequence)
+--~ end
+--~ local start = #sequence + 1
+--~ uncover_1(current,sequence)
+--~ local stop = #sequence
+--~ if after then
+--~ uncover_1(after,sequence)
+--~ end
+--~ if sequence[1] then
+--~ nt = nt + 1
+--~ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
+--~ for unic, _ in next, sequence[start] do
+--~ local cu = contexts[unic]
+--~ if not cu then
+--~ contexts[unic] = t
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~                 elseif fmt == "reversecoverage" then -- we could combine both branches (only difference is replacements)
+--~ if lookuptype ~= "reversesub" then
+--~ report_prepare("unsupported reverse coverage %s for %s",lookuptype,lookupname)
+--~ else
+--~ local contexts = lookuphash[lookupname]
+--~ if not contexts then
+--~ contexts = { }
+--~ lookuphash[lookupname] = contexts
+--~ end
+--~ local t, nt = { }, 0
+--~ for nofrules=1,#rules do
+--~ local rule = rules[nofrules]
+--~ local current = rule.current
+--~ local before = rule.before
+--~ local after = rule.after
+--~ local replacements = rule.replacements
+--~ local sequence = { }
+--~ if before then
+--~ uncover_1(before,sequence)
+--~ end
+--~ local start = #sequence + 1
+--~ uncover_1(current,sequence)
+--~ local stop = #sequence
+--~ if after then
+--~ uncover_1(after,sequence)
+--~ end
+--~ if sequence[1] then
+--~ nt = nt + 1
+--~ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
+--~ for unic, _ in next, sequence[start] do
+--~ local cu = contexts[unic]
+--~ if not cu then
+--~ contexts[unic] = t
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ -- elseif fmt == "glyphs" then --maybe just make then before = { fore } and share with coverage
+--~ -- if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
+--~ -- report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
+--~ -- else
+--~ -- local contexts = lookuphash[lookupname]
+--~ -- if not contexts then
+--~ -- contexts = { }
+--~ -- lookuphash[lookupname] = contexts
+--~ -- end
+--~ -- local t, nt = { }, 0
+--~ -- for nofrules=1,#rules do -- we can make glyphs a special case (less tables)
+--~ -- local rule = rules[nofrules]
+--~ -- local current = rule.names
+--~ -- local before = rule.fore
+--~ -- local after = rule.back
+--~ -- local sequence = { }
+--~ -- if before then
+--~ -- uncover_1(before,sequence)
+--~ -- end
+--~ -- local start = #sequence + 1
+--~ -- uncover_1(current,sequence)
+--~ -- local stop = #sequence
+--~ -- if after then
+--~ -- uncover_1(after,sequence)
+--~ -- end
+--~ -- if sequence then
+--~ -- nt = nt + 1
+--~ -- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
+--~ -- for unic, _ in next, sequence[start] do
+--~ -- local cu = contexts[unic]
+--~ -- if not cu then
+--~ -- contexts[unic] = t
+--~ -- end
+--~ -- end
+--~ -- end
+--~ -- end
+--~ -- end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+
+local valid = {
+ coverage = { chainsub = true, chainpos = true },
+ reversecoverage = { reversesub = true },
+ glyphs = { chainsub = true, chainpos = true },
+}
local function prepare_contextchains(tfmdata)
local rawdata = tfmdata.shared.rawdata
@@ -10051,122 +10283,72 @@ local function prepare_contextchains(tfmdata)
if lookups then
for lookupname, lookupdata in next, rawdata.lookups do
local lookuptype = lookupdata.type
- if not lookuptype then
- report_prepare("missing lookuptype for %s",lookupname)
- else
+ if lookuptype then
local rules = lookupdata.rules
if rules then
- local fmt = lookupdata.format
- -- lookuphash[lookupname][unicode]
- if fmt == "coverage" then -- or fmt == "class" (converted into "coverage")
- if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- -- todo: dejavu-serif has one (but i need to see what use it has)
- report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do -- does #rules>1 happen often?
- local rule = rules[nofrules]
- local current, before, after, sequence = rule.current, rule.before, rule.after, { }
- if before then
- uncover(before,sequence)
- end
- local start = #sequence + 1
- uncover(current,sequence)
- local stop = #sequence
- if after then
- uncover(after,sequence)
- end
- if sequence[1] then
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
+ local format = lookupdata.format
+ local validformat = valid[format]
+ if not validformat then
+ report_prepare("unsupported format %s",format)
+ elseif not validformat[lookuptype] then
+ -- todo: dejavu-serif has one (but i need to see what use it has)
+ report_prepare("unsupported %s %s for %s",format,lookuptype,lookupname)
+ else
+ local contexts = lookuphash[lookupname]
+ if not contexts then
+ contexts = { }
+ lookuphash[lookupname] = contexts
end
- elseif fmt == "reversecoverage" then
- if lookuptype ~= "reversesub" then
- report_prepare("unsupported reverse coverage %s for %s",lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current, before, after, replacements, sequence = rule.current, rule.before, rule.after, rule.replacements, { }
- if before then
- uncover(before,sequence)
- end
- local start = #sequence + 1
- uncover(current,sequence)
- local stop = #sequence
- if after then
- uncover(after,sequence)
- end
- if replacements then
- replacements = split(replacements,current[1])
- end
- if sequence[1] then
- -- this is different from normal coverage, we assume only replacements
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
+ local t, nt = { }, 0
+ for nofrules=1,#rules do
+ local rule = rules[nofrules]
+ local current = rule.current
+ local before = rule.before
+ local after = rule.after
+ local replacements = rule.replacements
+ local sequence = { }
+ local nofsequences = 0
+                        -- Eventually we could store start, stop and sequence in the cached file,
+                        -- but then less sharing takes place, so it's best not to do that without
+                        -- a lot of profiling; let's forget about it for now.
+ if before then
+ for n=1,#before do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = before[n]
end
end
- end
- elseif fmt == "glyphs" then --maybe just make then before = { fore } and share with coverage
- if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
+ local start = nofsequences + 1
+ for n=1,#current do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = current[n]
end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current, before, after, sequence = rule.names, rule.fore, rule.back, { }
- if before then
- uncover(before,sequence)
- end
- local start = #sequence + 1
- uncover(current,sequence)
- local stop = #sequence
- if after then
- uncover(after,sequence)
+ local stop = nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = after[n]
end
- if sequence[1] then
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
+ end
+ if sequence[1] then
+                        -- Replacements only happen with reverse lookups, as those are single
+                        -- substitutions only. We could pack them into current (replacement value
+                        -- instead of true) and then use sequence[start] instead, but that is somewhat ugly.
+ nt = nt + 1
+ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
+ for unic, _ in next, sequence[start] do
+ local cu = contexts[unic]
+ if not cu then
+ contexts[unic] = t
end
end
end
end
end
+ else
+ -- no rules
end
+ else
+ report_prepare("missing lookuptype for %s",lookupname)
end
end
end
@@ -10210,6 +10392,629 @@ end -- closure
do -- begin closure to overcome local limits and interference
+if not modules then modules = { } end modules ['luatex-fonts-chr'] = {
+ version = 1.001,
+ comment = "companion to luatex-fonts.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+characters = characters or { }
+characters.categories = {
+ [0x0300]="mn",
+ [0x0301]="mn",
+ [0x0302]="mn",
+ [0x0303]="mn",
+ [0x0304]="mn",
+ [0x0305]="mn",
+ [0x0306]="mn",
+ [0x0307]="mn",
+ [0x0308]="mn",
+ [0x0309]="mn",
+ [0x030A]="mn",
+ [0x030B]="mn",
+ [0x030C]="mn",
+ [0x030D]="mn",
+ [0x030E]="mn",
+ [0x030F]="mn",
+ [0x0310]="mn",
+ [0x0311]="mn",
+ [0x0312]="mn",
+ [0x0313]="mn",
+ [0x0314]="mn",
+ [0x0315]="mn",
+ [0x0316]="mn",
+ [0x0317]="mn",
+ [0x0318]="mn",
+ [0x0319]="mn",
+ [0x031A]="mn",
+ [0x031B]="mn",
+ [0x031C]="mn",
+ [0x031D]="mn",
+ [0x031E]="mn",
+ [0x031F]="mn",
+ [0x0320]="mn",
+ [0x0321]="mn",
+ [0x0322]="mn",
+ [0x0323]="mn",
+ [0x0324]="mn",
+ [0x0325]="mn",
+ [0x0326]="mn",
+ [0x0327]="mn",
+ [0x0328]="mn",
+ [0x0329]="mn",
+ [0x032A]="mn",
+ [0x032B]="mn",
+ [0x032C]="mn",
+ [0x032D]="mn",
+ [0x032E]="mn",
+ [0x032F]="mn",
+ [0x0330]="mn",
+ [0x0331]="mn",
+ [0x0332]="mn",
+ [0x0333]="mn",
+ [0x0334]="mn",
+ [0x0335]="mn",
+ [0x0336]="mn",
+ [0x0337]="mn",
+ [0x0338]="mn",
+ [0x0339]="mn",
+ [0x033A]="mn",
+ [0x033B]="mn",
+ [0x033C]="mn",
+ [0x033D]="mn",
+ [0x033E]="mn",
+ [0x033F]="mn",
+ [0x0340]="mn",
+ [0x0341]="mn",
+ [0x0342]="mn",
+ [0x0343]="mn",
+ [0x0344]="mn",
+ [0x0345]="mn",
+ [0x0346]="mn",
+ [0x0347]="mn",
+ [0x0348]="mn",
+ [0x0349]="mn",
+ [0x034A]="mn",
+ [0x034B]="mn",
+ [0x034C]="mn",
+ [0x034D]="mn",
+ [0x034E]="mn",
+ [0x034F]="mn",
+ [0x0350]="mn",
+ [0x0351]="mn",
+ [0x0352]="mn",
+ [0x0353]="mn",
+ [0x0354]="mn",
+ [0x0355]="mn",
+ [0x0356]="mn",
+ [0x0357]="mn",
+ [0x0358]="mn",
+ [0x0359]="mn",
+ [0x035A]="mn",
+ [0x035B]="mn",
+ [0x035C]="mn",
+ [0x035D]="mn",
+ [0x035E]="mn",
+ [0x035F]="mn",
+ [0x0360]="mn",
+ [0x0361]="mn",
+ [0x0362]="mn",
+ [0x0363]="mn",
+ [0x0364]="mn",
+ [0x0365]="mn",
+ [0x0366]="mn",
+ [0x0367]="mn",
+ [0x0368]="mn",
+ [0x0369]="mn",
+ [0x036A]="mn",
+ [0x036B]="mn",
+ [0x036C]="mn",
+ [0x036D]="mn",
+ [0x036E]="mn",
+ [0x036F]="mn",
+ [0x0483]="mn",
+ [0x0484]="mn",
+ [0x0485]="mn",
+ [0x0486]="mn",
+ [0x0591]="mn",
+ [0x0592]="mn",
+ [0x0593]="mn",
+ [0x0594]="mn",
+ [0x0595]="mn",
+ [0x0596]="mn",
+ [0x0597]="mn",
+ [0x0598]="mn",
+ [0x0599]="mn",
+ [0x059A]="mn",
+ [0x059B]="mn",
+ [0x059C]="mn",
+ [0x059D]="mn",
+ [0x059E]="mn",
+ [0x059F]="mn",
+ [0x05A0]="mn",
+ [0x05A1]="mn",
+ [0x05A2]="mn",
+ [0x05A3]="mn",
+ [0x05A4]="mn",
+ [0x05A5]="mn",
+ [0x05A6]="mn",
+ [0x05A7]="mn",
+ [0x05A8]="mn",
+ [0x05A9]="mn",
+ [0x05AA]="mn",
+ [0x05AB]="mn",
+ [0x05AC]="mn",
+ [0x05AD]="mn",
+ [0x05AE]="mn",
+ [0x05AF]="mn",
+ [0x05B0]="mn",
+ [0x05B1]="mn",
+ [0x05B2]="mn",
+ [0x05B3]="mn",
+ [0x05B4]="mn",
+ [0x05B5]="mn",
+ [0x05B6]="mn",
+ [0x05B7]="mn",
+ [0x05B8]="mn",
+ [0x05B9]="mn",
+ [0x05BA]="mn",
+ [0x05BB]="mn",
+ [0x05BC]="mn",
+ [0x05BD]="mn",
+ [0x05BF]="mn",
+ [0x05C1]="mn",
+ [0x05C2]="mn",
+ [0x05C4]="mn",
+ [0x05C5]="mn",
+ [0x05C7]="mn",
+ [0x0610]="mn",
+ [0x0611]="mn",
+ [0x0612]="mn",
+ [0x0613]="mn",
+ [0x0614]="mn",
+ [0x0615]="mn",
+ [0x064B]="mn",
+ [0x064C]="mn",
+ [0x064D]="mn",
+ [0x064E]="mn",
+ [0x064F]="mn",
+ [0x0650]="mn",
+ [0x0651]="mn",
+ [0x0652]="mn",
+ [0x0653]="mn",
+ [0x0654]="mn",
+ [0x0655]="mn",
+ [0x0656]="mn",
+ [0x0657]="mn",
+ [0x0658]="mn",
+ [0x0659]="mn",
+ [0x065A]="mn",
+ [0x065B]="mn",
+ [0x065C]="mn",
+ [0x065D]="mn",
+ [0x065E]="mn",
+ [0x0670]="mn",
+ [0x06D6]="mn",
+ [0x06D7]="mn",
+ [0x06D8]="mn",
+ [0x06D9]="mn",
+ [0x06DA]="mn",
+ [0x06DB]="mn",
+ [0x06DC]="mn",
+ [0x06DF]="mn",
+ [0x06E0]="mn",
+ [0x06E1]="mn",
+ [0x06E2]="mn",
+ [0x06E3]="mn",
+ [0x06E4]="mn",
+ [0x06E7]="mn",
+ [0x06E8]="mn",
+ [0x06EA]="mn",
+ [0x06EB]="mn",
+ [0x06EC]="mn",
+ [0x06ED]="mn",
+ [0x0711]="mn",
+ [0x0730]="mn",
+ [0x0731]="mn",
+ [0x0732]="mn",
+ [0x0733]="mn",
+ [0x0734]="mn",
+ [0x0735]="mn",
+ [0x0736]="mn",
+ [0x0737]="mn",
+ [0x0738]="mn",
+ [0x0739]="mn",
+ [0x073A]="mn",
+ [0x073B]="mn",
+ [0x073C]="mn",
+ [0x073D]="mn",
+ [0x073E]="mn",
+ [0x073F]="mn",
+ [0x0740]="mn",
+ [0x0741]="mn",
+ [0x0742]="mn",
+ [0x0743]="mn",
+ [0x0744]="mn",
+ [0x0745]="mn",
+ [0x0746]="mn",
+ [0x0747]="mn",
+ [0x0748]="mn",
+ [0x0749]="mn",
+ [0x074A]="mn",
+ [0x07A6]="mn",
+ [0x07A7]="mn",
+ [0x07A8]="mn",
+ [0x07A9]="mn",
+ [0x07AA]="mn",
+ [0x07AB]="mn",
+ [0x07AC]="mn",
+ [0x07AD]="mn",
+ [0x07AE]="mn",
+ [0x07AF]="mn",
+ [0x07B0]="mn",
+ [0x07EB]="mn",
+ [0x07EC]="mn",
+ [0x07ED]="mn",
+ [0x07EE]="mn",
+ [0x07EF]="mn",
+ [0x07F0]="mn",
+ [0x07F1]="mn",
+ [0x07F2]="mn",
+ [0x07F3]="mn",
+ [0x0901]="mn",
+ [0x0902]="mn",
+ [0x093C]="mn",
+ [0x0941]="mn",
+ [0x0942]="mn",
+ [0x0943]="mn",
+ [0x0944]="mn",
+ [0x0945]="mn",
+ [0x0946]="mn",
+ [0x0947]="mn",
+ [0x0948]="mn",
+ [0x094D]="mn",
+ [0x0951]="mn",
+ [0x0952]="mn",
+ [0x0953]="mn",
+ [0x0954]="mn",
+ [0x0962]="mn",
+ [0x0963]="mn",
+ [0x0981]="mn",
+ [0x09BC]="mn",
+ [0x09C1]="mn",
+ [0x09C2]="mn",
+ [0x09C3]="mn",
+ [0x09C4]="mn",
+ [0x09CD]="mn",
+ [0x09E2]="mn",
+ [0x09E3]="mn",
+ [0x0A01]="mn",
+ [0x0A02]="mn",
+ [0x0A3C]="mn",
+ [0x0A41]="mn",
+ [0x0A42]="mn",
+ [0x0A47]="mn",
+ [0x0A48]="mn",
+ [0x0A4B]="mn",
+ [0x0A4C]="mn",
+ [0x0A4D]="mn",
+ [0x0A70]="mn",
+ [0x0A71]="mn",
+ [0x0A81]="mn",
+ [0x0A82]="mn",
+ [0x0ABC]="mn",
+ [0x0AC1]="mn",
+ [0x0AC2]="mn",
+ [0x0AC3]="mn",
+ [0x0AC4]="mn",
+ [0x0AC5]="mn",
+ [0x0AC7]="mn",
+ [0x0AC8]="mn",
+ [0x0ACD]="mn",
+ [0x0AE2]="mn",
+ [0x0AE3]="mn",
+ [0x0B01]="mn",
+ [0x0B3C]="mn",
+ [0x0B3F]="mn",
+ [0x0B41]="mn",
+ [0x0B42]="mn",
+ [0x0B43]="mn",
+ [0x0B4D]="mn",
+ [0x0B56]="mn",
+ [0x0B82]="mn",
+ [0x0BC0]="mn",
+ [0x0BCD]="mn",
+ [0x0C3E]="mn",
+ [0x0C3F]="mn",
+ [0x0C40]="mn",
+ [0x0C46]="mn",
+ [0x0C47]="mn",
+ [0x0C48]="mn",
+ [0x0C4A]="mn",
+ [0x0C4B]="mn",
+ [0x0C4C]="mn",
+ [0x0C4D]="mn",
+ [0x0C55]="mn",
+ [0x0C56]="mn",
+ [0x0CBC]="mn",
+ [0x0CBF]="mn",
+ [0x0CC6]="mn",
+ [0x0CCC]="mn",
+ [0x0CCD]="mn",
+ [0x0CE2]="mn",
+ [0x0CE3]="mn",
+ [0x0D41]="mn",
+ [0x0D42]="mn",
+ [0x0D43]="mn",
+ [0x0D4D]="mn",
+ [0x0DCA]="mn",
+ [0x0DD2]="mn",
+ [0x0DD3]="mn",
+ [0x0DD4]="mn",
+ [0x0DD6]="mn",
+ [0x0E31]="mn",
+ [0x0E34]="mn",
+ [0x0E35]="mn",
+ [0x0E36]="mn",
+ [0x0E37]="mn",
+ [0x0E38]="mn",
+ [0x0E39]="mn",
+ [0x0E3A]="mn",
+ [0x0E47]="mn",
+ [0x0E48]="mn",
+ [0x0E49]="mn",
+ [0x0E4A]="mn",
+ [0x0E4B]="mn",
+ [0x0E4C]="mn",
+ [0x0E4D]="mn",
+ [0x0E4E]="mn",
+ [0x0EB1]="mn",
+ [0x0EB4]="mn",
+ [0x0EB5]="mn",
+ [0x0EB6]="mn",
+ [0x0EB7]="mn",
+ [0x0EB8]="mn",
+ [0x0EB9]="mn",
+ [0x0EBB]="mn",
+ [0x0EBC]="mn",
+ [0x0EC8]="mn",
+ [0x0EC9]="mn",
+ [0x0ECA]="mn",
+ [0x0ECB]="mn",
+ [0x0ECC]="mn",
+ [0x0ECD]="mn",
+ [0x0F18]="mn",
+ [0x0F19]="mn",
+ [0x0F35]="mn",
+ [0x0F37]="mn",
+ [0x0F39]="mn",
+ [0x0F71]="mn",
+ [0x0F72]="mn",
+ [0x0F73]="mn",
+ [0x0F74]="mn",
+ [0x0F75]="mn",
+ [0x0F76]="mn",
+ [0x0F77]="mn",
+ [0x0F78]="mn",
+ [0x0F79]="mn",
+ [0x0F7A]="mn",
+ [0x0F7B]="mn",
+ [0x0F7C]="mn",
+ [0x0F7D]="mn",
+ [0x0F7E]="mn",
+ [0x0F80]="mn",
+ [0x0F81]="mn",
+ [0x0F82]="mn",
+ [0x0F83]="mn",
+ [0x0F84]="mn",
+ [0x0F86]="mn",
+ [0x0F87]="mn",
+ [0x0F90]="mn",
+ [0x0F91]="mn",
+ [0x0F92]="mn",
+ [0x0F93]="mn",
+ [0x0F94]="mn",
+ [0x0F95]="mn",
+ [0x0F96]="mn",
+ [0x0F97]="mn",
+ [0x0F99]="mn",
+ [0x0F9A]="mn",
+ [0x0F9B]="mn",
+ [0x0F9C]="mn",
+ [0x0F9D]="mn",
+ [0x0F9E]="mn",
+ [0x0F9F]="mn",
+ [0x0FA0]="mn",
+ [0x0FA1]="mn",
+ [0x0FA2]="mn",
+ [0x0FA3]="mn",
+ [0x0FA4]="mn",
+ [0x0FA5]="mn",
+ [0x0FA6]="mn",
+ [0x0FA7]="mn",
+ [0x0FA8]="mn",
+ [0x0FA9]="mn",
+ [0x0FAA]="mn",
+ [0x0FAB]="mn",
+ [0x0FAC]="mn",
+ [0x0FAD]="mn",
+ [0x0FAE]="mn",
+ [0x0FAF]="mn",
+ [0x0FB0]="mn",
+ [0x0FB1]="mn",
+ [0x0FB2]="mn",
+ [0x0FB3]="mn",
+ [0x0FB4]="mn",
+ [0x0FB5]="mn",
+ [0x0FB6]="mn",
+ [0x0FB7]="mn",
+ [0x0FB8]="mn",
+ [0x0FB9]="mn",
+ [0x0FBA]="mn",
+ [0x0FBB]="mn",
+ [0x0FBC]="mn",
+ [0x0FC6]="mn",
+ [0x102D]="mn",
+ [0x102E]="mn",
+ [0x102F]="mn",
+ [0x1030]="mn",
+ [0x1032]="mn",
+ [0x1036]="mn",
+ [0x1037]="mn",
+ [0x1039]="mn",
+ [0x1058]="mn",
+ [0x1059]="mn",
+ [0x135F]="mn",
+ [0x1712]="mn",
+ [0x1713]="mn",
+ [0x1714]="mn",
+ [0x1732]="mn",
+ [0x1733]="mn",
+ [0x1734]="mn",
+ [0x1752]="mn",
+ [0x1753]="mn",
+ [0x1772]="mn",
+ [0x1773]="mn",
+ [0x17B7]="mn",
+ [0x17B8]="mn",
+ [0x17B9]="mn",
+ [0x17BA]="mn",
+ [0x17BB]="mn",
+ [0x17BC]="mn",
+ [0x17BD]="mn",
+ [0x17C6]="mn",
+ [0x17C9]="mn",
+ [0x17CA]="mn",
+ [0x17CB]="mn",
+ [0x17CC]="mn",
+ [0x17CD]="mn",
+ [0x17CE]="mn",
+ [0x17CF]="mn",
+ [0x17D0]="mn",
+ [0x17D1]="mn",
+ [0x17D2]="mn",
+ [0x17D3]="mn",
+ [0x17DD]="mn",
+ [0x180B]="mn",
+ [0x180C]="mn",
+ [0x180D]="mn",
+ [0x18A9]="mn",
+ [0x1920]="mn",
+ [0x1921]="mn",
+ [0x1922]="mn",
+ [0x1927]="mn",
+ [0x1928]="mn",
+ [0x1932]="mn",
+ [0x1939]="mn",
+ [0x193A]="mn",
+ [0x193B]="mn",
+ [0x1A17]="mn",
+ [0x1A18]="mn",
+ [0x1B00]="mn",
+ [0x1B01]="mn",
+ [0x1B02]="mn",
+ [0x1B03]="mn",
+ [0x1B34]="mn",
+ [0x1B36]="mn",
+ [0x1B37]="mn",
+ [0x1B38]="mn",
+ [0x1B39]="mn",
+ [0x1B3A]="mn",
+ [0x1B3C]="mn",
+ [0x1B42]="mn",
+ [0x1B6B]="mn",
+ [0x1B6C]="mn",
+ [0x1B6D]="mn",
+ [0x1B6E]="mn",
+ [0x1B6F]="mn",
+ [0x1B70]="mn",
+ [0x1B71]="mn",
+ [0x1B72]="mn",
+ [0x1B73]="mn",
+ [0x1DC0]="mn",
+ [0x1DC1]="mn",
+ [0x1DC2]="mn",
+ [0x1DC3]="mn",
+ [0x1DC4]="mn",
+ [0x1DC5]="mn",
+ [0x1DC6]="mn",
+ [0x1DC7]="mn",
+ [0x1DC8]="mn",
+ [0x1DC9]="mn",
+ [0x1DCA]="mn",
+ [0x1DFE]="mn",
+ [0x1DFF]="mn",
+ [0x20D0]="mn",
+ [0x20D1]="mn",
+ [0x20D2]="mn",
+ [0x20D3]="mn",
+ [0x20D4]="mn",
+ [0x20D5]="mn",
+ [0x20D6]="mn",
+ [0x20D7]="mn",
+ [0x20D8]="mn",
+ [0x20D9]="mn",
+ [0x20DA]="mn",
+ [0x20DB]="mn",
+ [0x20DC]="mn",
+ [0x20E1]="mn",
+ [0x20E5]="mn",
+ [0x20E6]="mn",
+ [0x20E7]="mn",
+ [0x20E8]="mn",
+ [0x20E9]="mn",
+ [0x20EA]="mn",
+ [0x20EB]="mn",
+ [0x20EC]="mn",
+ [0x20ED]="mn",
+ [0x20EE]="mn",
+ [0x20EF]="mn",
+ [0x302A]="mn",
+ [0x302B]="mn",
+ [0x302C]="mn",
+ [0x302D]="mn",
+ [0x302E]="mn",
+ [0x302F]="mn",
+ [0x3099]="mn",
+ [0x309A]="mn",
+ [0xA806]="mn",
+ [0xA80B]="mn",
+ [0xA825]="mn",
+ [0xA826]="mn",
+ [0xFB1E]="mn",
+ [0xFE00]="mn",
+ [0xFE01]="mn",
+ [0xFE02]="mn",
+ [0xFE03]="mn",
+ [0xFE04]="mn",
+ [0xFE05]="mn",
+ [0xFE06]="mn",
+ [0xFE07]="mn",
+ [0xFE08]="mn",
+ [0xFE09]="mn",
+ [0xFE0A]="mn",
+ [0xFE0B]="mn",
+ [0xFE0C]="mn",
+ [0xFE0D]="mn",
+ [0xFE0E]="mn",
+ [0xFE0F]="mn",
+ [0xFE20]="mn",
+ [0xFE21]="mn",
+ [0xFE22]="mn",
+ [0xFE23]="mn",
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
if not modules then modules = { } end modules ['font-ota'] = {
version = 1.001,
comment = "companion to font-otf.lua (analysing)",
@@ -10268,8 +11073,6 @@ process features right.</p>
-- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
--- an example analyzer (should move to font-ota.lua)
-
local state = attributes.private('state')
function analyzers.setstate(head,font)
@@ -10377,7 +11180,8 @@ registerotffeature {
methods.latn = analyzers.setstate
--- this info eventually will go into char-def
+-- this info will eventually go into char-def and we will then have a state
+-- table for the generic code
local zwnj = 0x200C
local zwj = 0x200D
@@ -10564,6 +11368,10 @@ function methods.arab(head,font,attr) -- maybe make a special version with no tr
return head, done
end
+directives.register("otf.analyze.useunicodemarks",function(v)
+ analyzers.useunicodemarks = v
+end)
+
end -- closure
do -- begin closure to overcome local limits and interference
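
A minimal standalone sketch (not part of the patch above) of the idea behind the rewritten prepare_contextchains: a contextchain rule is flattened into one sequence of covers, with start/stop marking the "current" part, and the rule is indexed under the unicodes of the first current cover so the runtime handler can find candidate chains by the character it sits on. The sample rule and the lookup name "ss01_subtable" are hypothetical; in the real code the covers come from the hashed before/current/after tables built in "reorganize lookups".

-- flatten before + current + after into one array of covers (sets of unicodes)
local rule = {
    before  = { { [0x0066] = true } },                      -- hypothetical cover: "f"
    current = { { [0x0069] = true }, { [0x006C] = true } }, -- hypothetical covers: "i", "l"
    after   = nil,
    lookups = { "ss01_subtable" },                          -- hypothetical lookup name
}

local sequence, nofsequences = { }, 0
for _, part in ipairs { rule.before or { }, rule.current, rule.after or { } } do
    for n = 1, #part do
        nofsequences = nofsequences + 1
        sequence[nofsequences] = part[n]
    end
end
local start = #(rule.before or { }) + 1
local stop  = start + #rule.current - 1

-- register the rule under every unicode covered by the first current position
local contexts = { }
local entry    = { 1, "chainsub", sequence, start, stop, rule.lookups }
for unic in pairs(sequence[start]) do
    if not contexts[unic] then
        contexts[unic] = { entry }
    end
end
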
diff --git a/tex/generic/context/luatex-fonts.lua b/tex/generic/context/luatex-fonts.lua
index 84d79a63f..23d33f26b 100644
--- a/tex/generic/context/luatex-fonts.lua
+++ b/tex/generic/context/luatex-fonts.lua
@@ -166,6 +166,7 @@ else
loadmodule('font-otb.lua')
loadmodule('node-inj.lua') -- will be replaced (luatex >= .70)
loadmodule('font-otn.lua')
+ -- loadmodule('luatex-fonts-chr.lua')
loadmodule('font-ota.lua')
loadmodule('luatex-fonts-lua.lua')
loadmodule('font-def.lua')