author     Marius <mariausol@gmail.com>  2013-08-07 15:40:24 +0300
committer  Marius <mariausol@gmail.com>  2013-08-07 15:40:24 +0300
commit     59e188b96de531e50d10b4a3a6fa6d0c941adf24 (patch)
tree       e8b23a8d401e111e6203b88171c886505dd686e6 /tex
parent     0a1d66e9356ae4676438c7e1865d71331437e412 (diff)
download   context-59e188b96de531e50d10b4a3a6fa6d0c941adf24.tar.gz
beta 2013.08.07 14:40
Diffstat (limited to 'tex')
-rw-r--r--  tex/context/base/cont-new.mkiv                       |   2
-rw-r--r--  tex/context/base/context-version.pdf                 | bin 4114 -> 4103 bytes
-rw-r--r--  tex/context/base/context.mkiv                        |   2
-rw-r--r--  tex/context/base/font-mis.lua                        |   2
-rw-r--r--  tex/context/base/font-otf.lua                        | 259
-rw-r--r--  tex/context/base/status-files.pdf                    | bin 24703 -> 24676 bytes
-rw-r--r--  tex/context/base/status-lua.log                      |   2
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua   | 137
8 files changed, 285 insertions, 119 deletions
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 1af85a7cd..9911d5c9d 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2013.08.07 10:55}
+\newcontextversion{2013.08.07 14:40}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index ef25efc14..ae939b303 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 4b2e0bdd7..1dadf05ed 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -25,7 +25,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2013.08.07 10:55}
+\edef\contextversion{2013.08.07 14:40}
\edef\contextkind {beta}
%D For those who want to use this:
diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua
index 87dea321b..0796356c4 100644
--- a/tex/context/base/font-mis.lua
+++ b/tex/context/base/font-mis.lua
@@ -22,7 +22,7 @@ local handlers = fonts.handlers
handlers.otf = handlers.otf or { }
local otf = handlers.otf
-otf.version = otf.version or 2.744
+otf.version = otf.version or 2.745
otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true)
function otf.loadcached(filename,format,sub)
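The version bump from 2.744 to 2.745 is not cosmetic: cached otf tables carry the version they were generated with, so raising it forces the loader to rebuild them with the new kern class handling, and font-mis.lua has to stay in sync with font-otf.lua (see the "beware" comment in the next file). A self-contained toy sketch of that version-keyed invalidation (plain Lua, not the ConTeXt containers API):

    -- illustrative sketch only: a cache entry stores the version it was
    -- produced with; a mismatch makes the entry stale and triggers a rebuild
    local cacheversion = 2.745
    local function loadcached(cache, name)
      local entry = cache[name]
      if entry and entry.version == cacheversion then
        return entry.data                  -- still valid
      end
      return nil                           -- stale or missing: regenerate
    end

    local cache = { ["somefont.otf"] = { version = 2.744, data = { } } }
    print(loadcached(cache, "somefont.otf")) -- nil: the 2.744 entry is now stale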
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index 6cb5ee9b9..d13ec4acf 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -48,7 +48,7 @@ local otf = fonts.handlers.otf
otf.glists = { "gsub", "gpos" }
-otf.version = 2.744 -- beware: also sync font-mis.lua
+otf.version = 2.745 -- beware: also sync font-mis.lua
otf.cache = containers.define("fonts", "otf", otf.version, true)
local fontdata = fonts.hashes.identifiers
@@ -73,6 +73,7 @@ local packdata = true
local syncspace = true
local forcenotdef = false
local includesubfonts = false
+local overloadkerns = false -- experiment
local wildcard = "*"
local default = "dflt"
@@ -87,6 +88,7 @@ registerdirective("fonts.otf.loader.usemetatables", function(v) usemetatables =
registerdirective("fonts.otf.loader.pack", function(v) packdata = v end)
registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end)
registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end)
+registerdirective("fonts.otf.loader.overloadkerns", function(v) overloadkerns = v end)
local function load_featurefile(raw,featurefile)
if featurefile and featurefile ~= "" then
@@ -1534,6 +1536,118 @@ actions["reorganize glyph kerns"] = function(data,filename,raw)
end
end
+-- actions["merge kern classes"] = function(data,filename,raw)
+-- local gposlist = raw.gpos
+-- if gposlist then
+-- local descriptions = data.descriptions
+-- local resources = data.resources
+-- local unicodes = resources.unicodes
+-- local splitter = data.helpers.tounicodetable
+-- local ignored = 0
+-- for gp=1,#gposlist do
+-- local gpos = gposlist[gp]
+-- local subtables = gpos.subtables
+-- if subtables then
+-- for s=1,#subtables do
+-- local subtable = subtables[s]
+-- local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
+-- if kernclass then -- the next one is quite slow
+-- local split = { } -- saves time
+-- for k=1,#kernclass do
+-- local kcl = kernclass[k]
+-- local firsts = kcl.firsts
+-- local seconds = kcl.seconds
+-- local offsets = kcl.offsets
+-- local lookups = kcl.lookup -- singular
+-- if type(lookups) ~= "table" then
+-- lookups = { lookups }
+-- end
+-- -- if offsets[1] == nil then
+-- -- offsets[1] = "" -- defaults ?
+-- -- end
+-- -- we can check the max in the loop
+-- -- local maxseconds = getn(seconds)
+-- for n, s in next, firsts do
+-- split[s] = split[s] or lpegmatch(splitter,s)
+-- end
+-- local maxseconds = 0
+-- for n, s in next, seconds do
+-- if n > maxseconds then
+-- maxseconds = n
+-- end
+-- split[s] = split[s] or lpegmatch(splitter,s)
+-- end
+-- for l=1,#lookups do
+-- local lookup = lookups[l]
+-- for fk=1,#firsts do -- maxfirsts ?
+-- local fv = firsts[fk]
+-- local splt = split[fv]
+-- if splt then
+-- local extrakerns = { }
+-- local baseoffset = (fk-1) * maxseconds
+-- for sk=2,maxseconds do -- will become 1 based in future luatex
+-- local sv = seconds[sk]
+-- -- for sk, sv in next, seconds do
+-- local splt = split[sv]
+-- if splt then -- redundant test
+-- local offset = offsets[baseoffset + sk]
+-- if offset then
+-- for i=1,#splt do
+-- extrakerns[splt[i]] = offset
+-- end
+-- end
+-- end
+-- end
+-- for i=1,#splt do
+-- local first_unicode = splt[i]
+-- local description = descriptions[first_unicode]
+-- if description then
+-- local kerns = description.kerns
+-- if not kerns then
+-- kerns = { } -- unicode indexed !
+-- description.kerns = kerns
+-- end
+-- local lookupkerns = kerns[lookup]
+-- if not lookupkerns then
+-- lookupkerns = { }
+-- kerns[lookup] = lookupkerns
+-- end
+-- if overloadkerns then
+-- for second_unicode, kern in next, extrakerns do
+-- lookupkerns[second_unicode] = kern
+-- end
+-- else
+-- for second_unicode, kern in next, extrakerns do
+-- local k = lookupkerns[second_unicode]
+-- if not k then
+-- lookupkerns[second_unicode] = kern
+-- elseif k ~= kern then
+-- if trace_loading then
+-- report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern)
+-- end
+-- ignored = ignored + 1
+-- end
+-- end
+-- end
+-- elseif trace_loading then
+-- report_otf("no glyph data for %U", first_unicode)
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- subtable.kernclass = { }
+-- end
+-- end
+-- end
+-- end
+-- if ignored > 0 then
+-- report_otf("%s kern overloads ignored",ignored)
+-- end
+-- end
+-- end
+
actions["merge kern classes"] = function(data,filename,raw)
local gposlist = raw.gpos
if gposlist then
@@ -1541,80 +1655,99 @@ actions["merge kern classes"] = function(data,filename,raw)
local resources = data.resources
local unicodes = resources.unicodes
local splitter = data.helpers.tounicodetable
+ local ignored = 0
+ local blocked = 0
for gp=1,#gposlist do
local gpos = gposlist[gp]
local subtables = gpos.subtables
if subtables then
+ local first_done = { } -- could become an option so that we can deal with buggy fonts that don't get fixed
+ local split = { } -- saves time .. although probably not that much any more in the fixed luatex kernclass table
for s=1,#subtables do
local subtable = subtables[s]
local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
+ local lookup = subtable.lookup or subtable.name
if kernclass then -- the next one is quite slow
- local split = { } -- saves time
- for k=1,#kernclass do
- local kcl = kernclass[k]
- local firsts = kcl.firsts
- local seconds = kcl.seconds
- local offsets = kcl.offsets
- local lookups = kcl.lookup -- singular
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- -- if offsets[1] == nil then
- -- offsets[1] = ""
- -- end
- -- we can check the max in the loop
- -- local maxseconds = getn(seconds)
- for n, s in next, firsts do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- local maxseconds = 0
- for n, s in next, seconds do
- if n > maxseconds then
- maxseconds = n
- end
- split[s] = split[s] or lpegmatch(splitter,s)
+ if #kernclass > 0 then
+ kernclass = kernclass[1]
+ lookup = type(kernclass.lookup) == "string" and kernclass.lookup or lookup
+ report_otf("fixing kernclass table of lookup %a",lookup)
+ end
+ local firsts = kernclass.firsts
+ local seconds = kernclass.seconds
+ local offsets = kernclass.offsets
+ -- if offsets[1] == nil then
+ -- offsets[1] = "" -- defaults ?
+ -- end
+ -- we can check the max in the loop
+ -- local maxseconds = getn(seconds)
+ for n, s in next, firsts do
+ split[s] = split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds = 0
+ for n, s in next, seconds do
+ if n > maxseconds then
+ maxseconds = n
end
- for l=1,#lookups do
- local lookup = lookups[l]
- for fk=1,#firsts do -- maxfirsts ?
- local fv = firsts[fk]
- local splt = split[fv]
- if splt then
- local extrakerns = { }
- local baseoffset = (fk-1) * maxseconds
- for sk=2,maxseconds do -- will become 1 based in future luatex
- local sv = seconds[sk]
- -- for sk, sv in next, seconds do
- local splt = split[sv]
- if splt then -- redundant test
- local offset = offsets[baseoffset + sk]
- if offset then
- for i=1,#splt do
- extrakerns[splt[i]] = offset
- end
- end
+ split[s] = split[s] or lpegmatch(splitter,s)
+ end
+ for fk=1,#firsts do -- maxfirsts ?
+ local fv = firsts[fk]
+ local splt = split[fv]
+ if splt then
+ local extrakerns = { }
+ local baseoffset = (fk-1) * maxseconds
+ for sk=2,maxseconds do -- will become 1 based in future luatex
+ local sv = seconds[sk]
+ -- for sk, sv in next, seconds do
+ local splt = split[sv]
+ if splt then -- redundant test
+ local offset = offsets[baseoffset + sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]] = offset
end
end
- for i=1,#splt do
- local first_unicode = splt[i]
- local description = descriptions[first_unicode]
- if description then
- local kerns = description.kerns
- if not kerns then
- kerns = { } -- unicode indexed !
- description.kerns = kerns
- end
- local lookupkerns = kerns[lookup]
- if not lookupkerns then
- lookupkerns = { }
- kerns[lookup] = lookupkerns
- end
+ end
+ end
+ for i=1,#splt do
+ local first_unicode = splt[i]
+ if first_done[first_unicode] then
+ report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode)
+ blocked = blocked + 1
+ else
+ first_done[first_unicode] = true
+ local description = descriptions[first_unicode]
+ if description then
+ local kerns = description.kerns
+ if not kerns then
+ kerns = { } -- unicode indexed !
+ description.kerns = kerns
+ end
+ local lookupkerns = kerns[lookup]
+ if not lookupkerns then
+ lookupkerns = { }
+ kerns[lookup] = lookupkerns
+ end
+ if overloadkerns then
for second_unicode, kern in next, extrakerns do
lookupkerns[second_unicode] = kern
end
- elseif trace_loading then
- report_otf("no glyph data for %U", first_unicode)
+ else
+ for second_unicode, kern in next, extrakerns do
+ local k = lookupkerns[second_unicode]
+ if not k then
+ lookupkerns[second_unicode] = kern
+ elseif k ~= kern then
+ if trace_loading then
+ report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern)
+ end
+ ignored = ignored + 1
+ end
+ end
end
+ elseif trace_loading then
+ report_otf("no glyph data for %U", first_unicode)
end
end
end
@@ -1625,6 +1758,12 @@ actions["merge kern classes"] = function(data,filename,raw)
end
end
end
+ if ignored > 0 then
+ report_otf("%s kern overloads ignored",ignored)
+ end
+ if blocked > 0 then
+ report_otf("%s succesive kerns blocked",blocked)
+ end
end
end
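Two behavioural changes sit in the hunk above. First, the new fonts.otf.loader.overloadkerns directive (default false): with the default, a pair kern that is already present for a first/second glyph combination is kept, the conflicting class kern is ignored, and the conflict is counted and reported; with the directive enabled, the class kern simply overwrites the existing value. Second, the first_done table makes sure that kerns for a given first glyph are only taken from the first subtable that provides them; later subtables touching the same glyph are blocked and counted. Assuming the standard ConTeXt directives/setters interface, the new directive would be switched on like the other loader directives registered earlier in this file:

    -- usage sketch (assumption: the usual directives interface, not part of this patch)
    directives.enable("fonts.otf.loader.overloadkerns")
    -- or from the TeX end:
    --   \enabledirectives[fonts.otf.loader.overloadkerns]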
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 0ec5a6755..ab7f03967 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.log b/tex/context/base/status-lua.log
index 370c5ea16..d5601fd3d 100644
--- a/tex/context/base/status-lua.log
+++ b/tex/context/base/status-lua.log
@@ -1,6 +1,6 @@
(cont-yes.mkiv
-ConTeXt ver: 2013.08.07 10:55 MKIV beta fmt: 2013.8.7 int: english/english
+ConTeXt ver: 2013.08.07 14:40 MKIV beta fmt: 2013.8.7 int: english/english
system > 'cont-new.mkiv' loaded
(cont-new.mkiv)
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index f7b952af9..93a3de890 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 08/07/13 10:55:57
+-- merge date : 08/07/13 14:40:18
do -- begin closure to overcome local limits and interference
@@ -5184,7 +5184,7 @@ local report_otf=logs.reporter("fonts","otf loading")
local fonts=fonts
local otf=fonts.handlers.otf
otf.glists={ "gsub","gpos" }
-otf.version=2.744
+otf.version=2.745
otf.cache=containers.define("fonts","otf",otf.version,true)
local fontdata=fonts.hashes.identifiers
local chardata=characters and characters.data
@@ -5204,6 +5204,7 @@ local packdata=true
local syncspace=true
local forcenotdef=false
local includesubfonts=false
+local overloadkerns=false
local wildcard="*"
local default="dflt"
local fontloaderfields=fontloader.fields
@@ -5215,6 +5216,7 @@ registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v e
registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
+registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end)
local function load_featurefile(raw,featurefile)
if featurefile and featurefile~="" then
if trace_loading then
@@ -6495,74 +6497,93 @@ actions["merge kern classes"]=function(data,filename,raw)
local resources=data.resources
local unicodes=resources.unicodes
local splitter=data.helpers.tounicodetable
+ local ignored=0
+ local blocked=0
for gp=1,#gposlist do
local gpos=gposlist[gp]
local subtables=gpos.subtables
if subtables then
+ local first_done={}
+ local split={}
for s=1,#subtables do
local subtable=subtables[s]
local kernclass=subtable.kernclass
+ local lookup=subtable.lookup or subtable.name
if kernclass then
- local split={}
- for k=1,#kernclass do
- local kcl=kernclass[k]
- local firsts=kcl.firsts
- local seconds=kcl.seconds
- local offsets=kcl.offsets
- local lookups=kcl.lookup
- if type(lookups)~="table" then
- lookups={ lookups }
- end
- for n,s in next,firsts do
- split[s]=split[s] or lpegmatch(splitter,s)
- end
- local maxseconds=0
- for n,s in next,seconds do
- if n>maxseconds then
- maxseconds=n
- end
- split[s]=split[s] or lpegmatch(splitter,s)
+ if #kernclass>0 then
+ kernclass=kernclass[1]
+ lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup
+ report_otf("fixing kernclass table of lookup %a",lookup)
+ end
+ local firsts=kernclass.firsts
+ local seconds=kernclass.seconds
+ local offsets=kernclass.offsets
+ for n,s in next,firsts do
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds=0
+ for n,s in next,seconds do
+ if n>maxseconds then
+ maxseconds=n
end
- for l=1,#lookups do
- local lookup=lookups[l]
- for fk=1,#firsts do
- local fv=firsts[fk]
- local splt=split[fv]
- if splt then
- local extrakerns={}
- local baseoffset=(fk-1)*maxseconds
- for sk=2,maxseconds do
- local sv=seconds[sk]
- local splt=split[sv]
- if splt then
- local offset=offsets[baseoffset+sk]
- if offset then
- for i=1,#splt do
- extrakerns[splt[i]]=offset
- end
- end
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ for fk=1,#firsts do
+ local fv=firsts[fk]
+ local splt=split[fv]
+ if splt then
+ local extrakerns={}
+ local baseoffset=(fk-1)*maxseconds
+ for sk=2,maxseconds do
+ local sv=seconds[sk]
+ local splt=split[sv]
+ if splt then
+ local offset=offsets[baseoffset+sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]]=offset
end
end
- for i=1,#splt do
- local first_unicode=splt[i]
- local description=descriptions[first_unicode]
- if description then
- local kerns=description.kerns
- if not kerns then
- kerns={}
- description.kerns=kerns
- end
- local lookupkerns=kerns[lookup]
- if not lookupkerns then
- lookupkerns={}
- kerns[lookup]=lookupkerns
- end
+ end
+ end
+ for i=1,#splt do
+ local first_unicode=splt[i]
+ if first_done[first_unicode] then
+ report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode)
+ blocked=blocked+1
+ else
+ first_done[first_unicode]=true
+ local description=descriptions[first_unicode]
+ if description then
+ local kerns=description.kerns
+ if not kerns then
+ kerns={}
+ description.kerns=kerns
+ end
+ local lookupkerns=kerns[lookup]
+ if not lookupkerns then
+ lookupkerns={}
+ kerns[lookup]=lookupkerns
+ end
+ if overloadkerns then
for second_unicode,kern in next,extrakerns do
lookupkerns[second_unicode]=kern
end
- elseif trace_loading then
- report_otf("no glyph data for %U",first_unicode)
+ else
+ for second_unicode,kern in next,extrakerns do
+ local k=lookupkerns[second_unicode]
+ if not k then
+ lookupkerns[second_unicode]=kern
+ elseif k~=kern then
+ if trace_loading then
+ report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern)
+ end
+ ignored=ignored+1
+ end
+ end
end
+ elseif trace_loading then
+ report_otf("no glyph data for %U",first_unicode)
end
end
end
@@ -6573,6 +6594,12 @@ actions["merge kern classes"]=function(data,filename,raw)
end
end
end
+ if ignored>0 then
+ report_otf("%s kern overloads ignored",ignored)
+ end
+ if blocked>0 then
+ report_otf("%s succesive kerns blocked",blocked)
+ end
end
end
actions["check glyphs"]=function(data,filename,raw)