author     Context Git Mirror Bot <phg42.2a@gmail.com>  2015-06-13 10:15:03 +0200
committer  Context Git Mirror Bot <phg42.2a@gmail.com>  2015-06-13 10:15:03 +0200
commit     bafe29de59fdc2a37922e198aebc9b04f518f152
tree       0712e772ffb501f01c36ad58f2f2761c38dc0322
parent     c727ed7331960718681fa4222bec81fb577b56fb
download   context-bafe29de59fdc2a37922e198aebc9b04f518f152.tar.gz

2015-06-13 09:54:00
-rw-r--r--  metapost/context/base/mp-mlib.mpiv                  |   13
-rw-r--r--  scripts/context/lua/mtx-fonts.lua                   |    7
-rw-r--r--  scripts/context/lua/mtxrun.lua                      |   15
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua              |   15
-rw-r--r--  scripts/context/stubs/unix/mtxrun                   |   15
-rw-r--r--  scripts/context/stubs/win64/mtxrun.lua              |   15
-rw-r--r--  tex/context/base/cont-new.mkiv                      |    2
-rw-r--r--  tex/context/base/context-version.pdf                |  bin 4205 -> 4202 bytes
-rw-r--r--  tex/context/base/context.mkiv                       |    2
-rw-r--r--  tex/context/base/font-afm.lua                       |   18
-rw-r--r--  tex/context/base/font-cff.lua                       | 1441
-rw-r--r--  tex/context/base/font-con.lua                       |   17
-rw-r--r--  tex/context/base/font-lib.mkvi                      |   12
-rw-r--r--  tex/context/base/font-mis.lua                       |    2
-rw-r--r--  tex/context/base/font-mps.lua                       |  379
-rw-r--r--  tex/context/base/font-off.lua                       |  228
-rw-r--r--  tex/context/base/font-otf.lua                       |   25
-rw-r--r--  tex/context/base/font-otr.lua                       | 1816
-rw-r--r--  tex/context/base/font-pat.lua                       |   17
-rw-r--r--  tex/context/base/font-syn.lua                       |  276
-rw-r--r--  tex/context/base/font-tmp.lua                       |  120
-rw-r--r--  tex/context/base/font-ttf.lua                       |  475
-rw-r--r--  tex/context/base/lxml-lpt.lua                       |    4
-rw-r--r--  tex/context/base/meta-imp-outlines.mkiv             |  150
-rw-r--r--  tex/context/base/mult-def.mkiv                      |    3
-rw-r--r--  tex/context/base/publ-ini.lua                       |    7
-rw-r--r--  tex/context/base/publ-ini.mkiv                      |    4
-rw-r--r--  tex/context/base/status-files.pdf                   |  bin 24422 -> 24425 bytes
-rw-r--r--  tex/context/base/status-lua.pdf                     |  bin 251556 -> 252045 bytes
-rw-r--r--  tex/context/base/util-tab.lua                       |    4
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua  |   41
31 files changed, 4856 insertions, 267 deletions
diff --git a/metapost/context/base/mp-mlib.mpiv b/metapost/context/base/mp-mlib.mpiv
index 6d4894fb6..94377e52d 100644
--- a/metapost/context/base/mp-mlib.mpiv
+++ b/metapost/context/base/mp-mlib.mpiv
@@ -768,6 +768,8 @@ vardef mfun_do_outline_text_flush (expr kind, n, x, y) (text t) =
mfun_do_outline_text_b (n, x, y) (t)
elseif kind = "r" :
mfun_do_outline_text_r (n, x, y) (t)
+ elseif kind = "p" :
+ mfun_do_outline_text_p (n, x, y) (t)
else :
mfun_do_outline_text_n (n, x, y) (t)
fi ;
@@ -793,6 +795,12 @@ vardef mfun_do_outline_text_d (expr n, x, y) (text t) =
endfor ;
enddef ;
+vardef mfun_do_outline_text_p (expr n, x, y) (text t) =
+ for i=t :
+ draw i shifted(x,y) ;
+ endfor ;
+enddef ;
+
vardef mfun_do_outline_text_b (expr n, x, y) (text t) =
mfun_do_outline_n := 0 ;
for i=t :
@@ -857,6 +865,9 @@ vardef mfun_do_outline_text_set_n text r =
def mfun_do_outline_options_r = r enddef ;
enddef ;
+vardef mfun_do_outline_text_set_p =
+enddef ;
+
def mfun_do_outline_options_d = enddef ;
def mfun_do_outline_options_f = enddef ;
def mfun_do_outline_options_r = enddef ;
@@ -882,6 +893,8 @@ vardef outlinetext@# (expr t) text rest =
mfun_do_outline_text_set_b rest ;
elseif kind = "r" :
mfun_do_outline_text_set_r rest ;
+ elseif kind = "p" :
+ mfun_do_outline_text_set_p ;
else :
mfun_do_outline_text_set_n rest ;
fi ;
diff --git a/scripts/context/lua/mtx-fonts.lua b/scripts/context/lua/mtx-fonts.lua
index 694e6a649..808f20358 100644
--- a/scripts/context/lua/mtx-fonts.lua
+++ b/scripts/context/lua/mtx-fonts.lua
@@ -84,6 +84,13 @@ local report = application.report
if not fontloader then fontloader = fontforge end
+dofile(resolvers.findfile("font-otr.lua","tex"))
+dofile(resolvers.findfile("font-cff.lua","tex"))
+dofile(resolvers.findfile("font-ttf.lua","tex"))
+dofile(resolvers.findfile("font-tmp.lua","tex"))
+------(resolvers.findfile("font-dsp.lua","tex"))
+------(resolvers.findfile("font-off.lua","tex"))
+
dofile(resolvers.findfile("font-otp.lua","tex")) -- we need to unpack the font for analysis
dofile(resolvers.findfile("font-syn.lua","tex"))
dofile(resolvers.findfile("font-trt.lua","tex"))
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index f3a794e4d..111691f61 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -5904,7 +5904,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 27822, stripped down to: 18037
+-- original size: 27840, stripped down to: 18055
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -6340,10 +6340,10 @@ local f_table_direct=formatters["{"]
local f_table_entry=formatters["[%q]={"]
local f_table_finish=formatters["}"]
local spaces=utilities.strings.newrepeater(" ")
-local serialize=table.serialize
+local original_serialize=table.serialize
local function serialize(root,name,specification)
if type(specification)=="table" then
- return serialize(root,name,specification)
+ return original_serialize(root,name,specification)
end
local t
local n=1
@@ -10772,7 +10772,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
--- original size: 48229, stripped down to: 30684
+-- original size: 48172, stripped down to: 30632
if not modules then modules={} end modules ['lxml-lpt']={
version=1.001,
@@ -11362,13 +11362,12 @@ local function tagstostring(list)
end
xml.nodesettostring=nodesettostring
local lpath
-local lshowoptions={ functions=false }
local function lshow(parsed)
if type(parsed)=="string" then
parsed=lpath(parsed)
end
report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
- table.serialize(parsed,false,lshowoptions))
+ table.serialize(parsed,false))
end
xml.lshow=lshow
local function add_comment(p,str)
@@ -18109,8 +18108,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 752587
--- stripped bytes : 271654
+-- original bytes : 752548
+-- stripped bytes : 271649
-- end library merge
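
The util-tab hunk above (repeated verbatim in the mtxrun stub copies that follow) fixes a Lua scoping bug: "local function serialize" brings the new local into scope before its body is compiled, so the call inside the body resolved to the new function itself rather than to the previously captured table.serialize, recursing forever whenever a table specification was passed. Renaming the captured value to original_serialize restores the intended delegation. A minimal standalone sketch of the pattern, with invented names, not taken from the commit:

    -- illustrative only: 'original', 'wrapped' and 'fixed' are made up for this sketch
    local original = function(s) return "serialized: " .. s end

    local wrapped = original          -- old code: captured under the same name ...
    local function wrapped(s)
        return wrapped(s)             -- ... so this resolves to itself, never to 'original'
    end

    local original_wrapped = original -- new code: captured under a distinct name
    local function fixed(s)
        return original_wrapped(s)    -- delegates to the saved function as intended
    end

    print(fixed("demo"))              -- serialized: demo
    -- print(wrapped("demo"))         -- would recurse until the stack overflows
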
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index f3a794e4d..111691f61 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -5904,7 +5904,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 27822, stripped down to: 18037
+-- original size: 27840, stripped down to: 18055
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -6340,10 +6340,10 @@ local f_table_direct=formatters["{"]
local f_table_entry=formatters["[%q]={"]
local f_table_finish=formatters["}"]
local spaces=utilities.strings.newrepeater(" ")
-local serialize=table.serialize
+local original_serialize=table.serialize
local function serialize(root,name,specification)
if type(specification)=="table" then
- return serialize(root,name,specification)
+ return original_serialize(root,name,specification)
end
local t
local n=1
@@ -10772,7 +10772,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
--- original size: 48229, stripped down to: 30684
+-- original size: 48172, stripped down to: 30632
if not modules then modules={} end modules ['lxml-lpt']={
version=1.001,
@@ -11362,13 +11362,12 @@ local function tagstostring(list)
end
xml.nodesettostring=nodesettostring
local lpath
-local lshowoptions={ functions=false }
local function lshow(parsed)
if type(parsed)=="string" then
parsed=lpath(parsed)
end
report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
- table.serialize(parsed,false,lshowoptions))
+ table.serialize(parsed,false))
end
xml.lshow=lshow
local function add_comment(p,str)
@@ -18109,8 +18108,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 752587
--- stripped bytes : 271654
+-- original bytes : 752548
+-- stripped bytes : 271649
-- end library merge
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index f3a794e4d..111691f61 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -5904,7 +5904,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 27822, stripped down to: 18037
+-- original size: 27840, stripped down to: 18055
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -6340,10 +6340,10 @@ local f_table_direct=formatters["{"]
local f_table_entry=formatters["[%q]={"]
local f_table_finish=formatters["}"]
local spaces=utilities.strings.newrepeater(" ")
-local serialize=table.serialize
+local original_serialize=table.serialize
local function serialize(root,name,specification)
if type(specification)=="table" then
- return serialize(root,name,specification)
+ return original_serialize(root,name,specification)
end
local t
local n=1
@@ -10772,7 +10772,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
--- original size: 48229, stripped down to: 30684
+-- original size: 48172, stripped down to: 30632
if not modules then modules={} end modules ['lxml-lpt']={
version=1.001,
@@ -11362,13 +11362,12 @@ local function tagstostring(list)
end
xml.nodesettostring=nodesettostring
local lpath
-local lshowoptions={ functions=false }
local function lshow(parsed)
if type(parsed)=="string" then
parsed=lpath(parsed)
end
report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
- table.serialize(parsed,false,lshowoptions))
+ table.serialize(parsed,false))
end
xml.lshow=lshow
local function add_comment(p,str)
@@ -18109,8 +18108,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 752587
--- stripped bytes : 271654
+-- original bytes : 752548
+-- stripped bytes : 271649
-- end library merge
diff --git a/scripts/context/stubs/win64/mtxrun.lua b/scripts/context/stubs/win64/mtxrun.lua
index f3a794e4d..111691f61 100644
--- a/scripts/context/stubs/win64/mtxrun.lua
+++ b/scripts/context/stubs/win64/mtxrun.lua
@@ -5904,7 +5904,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 27822, stripped down to: 18037
+-- original size: 27840, stripped down to: 18055
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -6340,10 +6340,10 @@ local f_table_direct=formatters["{"]
local f_table_entry=formatters["[%q]={"]
local f_table_finish=formatters["}"]
local spaces=utilities.strings.newrepeater(" ")
-local serialize=table.serialize
+local original_serialize=table.serialize
local function serialize(root,name,specification)
if type(specification)=="table" then
- return serialize(root,name,specification)
+ return original_serialize(root,name,specification)
end
local t
local n=1
@@ -10772,7 +10772,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
--- original size: 48229, stripped down to: 30684
+-- original size: 48172, stripped down to: 30632
if not modules then modules={} end modules ['lxml-lpt']={
version=1.001,
@@ -11362,13 +11362,12 @@ local function tagstostring(list)
end
xml.nodesettostring=nodesettostring
local lpath
-local lshowoptions={ functions=false }
local function lshow(parsed)
if type(parsed)=="string" then
parsed=lpath(parsed)
end
report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
- table.serialize(parsed,false,lshowoptions))
+ table.serialize(parsed,false))
end
xml.lshow=lshow
local function add_comment(p,str)
@@ -18109,8 +18108,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 752587
--- stripped bytes : 271654
+-- original bytes : 752548
+-- stripped bytes : 271649
-- end library merge
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 45216352b..399111c91 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2015.06.12 10:06}
+\newcontextversion{2015.06.13 09:52}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index 97d3058a2..50515ecec 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index df01cab5a..462bbb538 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -39,7 +39,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2015.06.12 10:06}
+\edef\contextversion{2015.06.13 09:52}
\edef\contextkind {beta}
%D For those who want to use this:
diff --git a/tex/context/base/font-afm.lua b/tex/context/base/font-afm.lua
index a96c6686e..329639b85 100644
--- a/tex/context/base/font-afm.lua
+++ b/tex/context/base/font-afm.lua
@@ -152,14 +152,14 @@ end
local keys = { }
-function keys.FontName (data,line) data.metadata.fontname = strip (line) -- get rid of spaces
- data.metadata.fullname = strip (line) end
-function keys.ItalicAngle (data,line) data.metadata.italicangle = tonumber (line) end
-function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch = toboolean(line,true) end
-function keys.CharWidth (data,line) data.metadata.charwidth = tonumber (line) end
-function keys.XHeight (data,line) data.metadata.xheight = tonumber (line) end
-function keys.Descender (data,line) data.metadata.descender = tonumber (line) end
-function keys.Ascender (data,line) data.metadata.ascender = tonumber (line) end
+function keys.FontName (data,line) data.metadata.fontname = strip (line) -- get rid of spaces
+ data.metadata.fullname = strip (line) end
+function keys.ItalicAngle (data,line) data.metadata.italicangle = tonumber (line) end
+function keys.IsFixedPitch(data,line) data.metadata.monospaced = toboolean(line,true) end
+function keys.CharWidth (data,line) data.metadata.charwidth = tonumber (line) end
+function keys.XHeight (data,line) data.metadata.xheight = tonumber (line) end
+function keys.Descender (data,line) data.metadata.descender = tonumber (line) end
+function keys.Ascender (data,line) data.metadata.ascender = tonumber (line) end
function keys.Comment (data,line)
-- Comment DesignSize 12 (pts)
-- Comment TFM designsize: 12 (in points)
@@ -640,7 +640,7 @@ local function copytotfm(data)
local spacer = "space"
local spaceunits = 500
--
- local monospaced = metadata.isfixedpitch
+ local monospaced = metadata.monospaced
local charwidth = metadata.charwidth
local italicangle = metadata.italicangle
local charxheight = metadata.xheight and metadata.xheight > 0 and metadata.xheight
diff --git a/tex/context/base/font-cff.lua b/tex/context/base/font-cff.lua
new file mode 100644
index 000000000..271de834f
--- /dev/null
+++ b/tex/context/base/font-cff.lua
@@ -0,0 +1,1441 @@
+if not modules then modules = { } end modules ['font-cff'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: option.outlines
+-- todo: option.boundingbox
+-- per charstring (less memory)
+
+-- This is a heavy one as it is a rather packed format. We don't need all the information
+-- now but we might need it later (who knows what magic we can do with metapost). So at
+-- some point this might become a module. We just follow Adobe Technical Notes #5176 and
+-- #5177. In case of doubt I looked in the fontforge code that comes with LuaTeX.
+
+-- For now we save the segments in a list of segments with the operator last in an entry
+-- because that reflects the original. But it might make more sense to use a single array
+-- per segment. For pdf a simple concat works ok, but for other purposes an operator-first
+-- flush is nicer.
+
+local next, type = next, type
+local byte = string.byte
+local concat, remove = table.concat, table.remove
+local floor, abs, round, ceil = math.floor, math.abs, math.round, math.ceil
+local P, C, R, S, C, Cs, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Ct
+local lpegmatch = lpeg.match
+
+local files = utilities.files
+
+local readbytes = files.readbytes
+local readstring = files.readstring
+local readbyte = files.readcardinal1 -- 8-bit unsigned integer
+local readushort = files.readcardinal2 -- 16-bit unsigned integer
+local readuint = files.readcardinal3 -- 24-bit unsigned integer
+local readulong = files.readcardinal4 -- 32-bit unsigned integer
+
+local setmetatableindex = table.setmetatableindex
+
+local trace_charstrings = false trackers.register("fonts.cff.charstrings",function(v) trace_charstrings = v end)
+local report = logs.reporter("otf reader","cff")
+
+local parsetopdictionary
+local parsecharstrings
+local parseprivates
+
+local defaultstrings = { [0] = -- hijacked from ff
+ ".notdef", "space", "exclam", "quotedbl", "numbersign", "dollar", "percent",
+ "ampersand", "quoteright", "parenleft", "parenright", "asterisk", "plus",
+ "comma", "hyphen", "period", "slash", "zero", "one", "two", "three", "four",
+ "five", "six", "seven", "eight", "nine", "colon", "semicolon", "less",
+ "equal", "greater", "question", "at", "A", "B", "C", "D", "E", "F", "G", "H",
+ "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W",
+ "X", "Y", "Z", "bracketleft", "backslash", "bracketright", "asciicircum",
+ "underscore", "quoteleft", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j",
+ "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y",
+ "z", "braceleft", "bar", "braceright", "asciitilde", "exclamdown", "cent",
+ "sterling", "fraction", "yen", "florin", "section", "currency",
+ "quotesingle", "quotedblleft", "guillemotleft", "guilsinglleft",
+ "guilsinglright", "fi", "fl", "endash", "dagger", "daggerdbl",
+ "periodcentered", "paragraph", "bullet", "quotesinglbase", "quotedblbase",
+ "quotedblright", "guillemotright", "ellipsis", "perthousand", "questiondown",
+ "grave", "acute", "circumflex", "tilde", "macron", "breve", "dotaccent",
+ "dieresis", "ring", "cedilla", "hungarumlaut", "ogonek", "caron", "emdash",
+ "AE", "ordfeminine", "Lslash", "Oslash", "OE", "ordmasculine", "ae",
+ "dotlessi", "lslash", "oslash", "oe", "germandbls", "onesuperior",
+ "logicalnot", "mu", "trademark", "Eth", "onehalf", "plusminus", "Thorn",
+ "onequarter", "divide", "brokenbar", "degree", "thorn", "threequarters",
+ "twosuperior", "registered", "minus", "eth", "multiply", "threesuperior",
+ "copyright", "Aacute", "Acircumflex", "Adieresis", "Agrave", "Aring",
+ "Atilde", "Ccedilla", "Eacute", "Ecircumflex", "Edieresis", "Egrave",
+ "Iacute", "Icircumflex", "Idieresis", "Igrave", "Ntilde", "Oacute",
+ "Ocircumflex", "Odieresis", "Ograve", "Otilde", "Scaron", "Uacute",
+ "Ucircumflex", "Udieresis", "Ugrave", "Yacute", "Ydieresis", "Zcaron",
+ "aacute", "acircumflex", "adieresis", "agrave", "aring", "atilde",
+ "ccedilla", "eacute", "ecircumflex", "edieresis", "egrave", "iacute",
+ "icircumflex", "idieresis", "igrave", "ntilde", "oacute", "ocircumflex",
+ "odieresis", "ograve", "otilde", "scaron", "uacute", "ucircumflex",
+ "udieresis", "ugrave", "yacute", "ydieresis", "zcaron", "exclamsmall",
+ "Hungarumlautsmall", "dollaroldstyle", "dollarsuperior", "ampersandsmall",
+ "Acutesmall", "parenleftsuperior", "parenrightsuperior", "twodotenleader",
+ "onedotenleader", "zerooldstyle", "oneoldstyle", "twooldstyle",
+ "threeoldstyle", "fouroldstyle", "fiveoldstyle", "sixoldstyle",
+ "sevenoldstyle", "eightoldstyle", "nineoldstyle", "commasuperior",
+ "threequartersemdash", "periodsuperior", "questionsmall", "asuperior",
+ "bsuperior", "centsuperior", "dsuperior", "esuperior", "isuperior",
+ "lsuperior", "msuperior", "nsuperior", "osuperior", "rsuperior", "ssuperior",
+ "tsuperior", "ff", "ffi", "ffl", "parenleftinferior", "parenrightinferior",
+ "Circumflexsmall", "hyphensuperior", "Gravesmall", "Asmall", "Bsmall",
+ "Csmall", "Dsmall", "Esmall", "Fsmall", "Gsmall", "Hsmall", "Ismall",
+ "Jsmall", "Ksmall", "Lsmall", "Msmall", "Nsmall", "Osmall", "Psmall",
+ "Qsmall", "Rsmall", "Ssmall", "Tsmall", "Usmall", "Vsmall", "Wsmall",
+ "Xsmall", "Ysmall", "Zsmall", "colonmonetary", "onefitted", "rupiah",
+ "Tildesmall", "exclamdownsmall", "centoldstyle", "Lslashsmall",
+ "Scaronsmall", "Zcaronsmall", "Dieresissmall", "Brevesmall", "Caronsmall",
+ "Dotaccentsmall", "Macronsmall", "figuredash", "hypheninferior",
+ "Ogoneksmall", "Ringsmall", "Cedillasmall", "questiondownsmall", "oneeighth",
+ "threeeighths", "fiveeighths", "seveneighths", "onethird", "twothirds",
+ "zerosuperior", "foursuperior", "fivesuperior", "sixsuperior",
+ "sevensuperior", "eightsuperior", "ninesuperior", "zeroinferior",
+ "oneinferior", "twoinferior", "threeinferior", "fourinferior",
+ "fiveinferior", "sixinferior", "seveninferior", "eightinferior",
+ "nineinferior", "centinferior", "dollarinferior", "periodinferior",
+ "commainferior", "Agravesmall", "Aacutesmall", "Acircumflexsmall",
+ "Atildesmall", "Adieresissmall", "Aringsmall", "AEsmall", "Ccedillasmall",
+ "Egravesmall", "Eacutesmall", "Ecircumflexsmall", "Edieresissmall",
+ "Igravesmall", "Iacutesmall", "Icircumflexsmall", "Idieresissmall",
+ "Ethsmall", "Ntildesmall", "Ogravesmall", "Oacutesmall", "Ocircumflexsmall",
+ "Otildesmall", "Odieresissmall", "OEsmall", "Oslashsmall", "Ugravesmall",
+ "Uacutesmall", "Ucircumflexsmall", "Udieresissmall", "Yacutesmall",
+ "Thornsmall", "Ydieresissmall", "001.000", "001.001", "001.002", "001.003",
+ "Black", "Bold", "Book", "Light", "Medium", "Regular", "Roman", "Semibold",
+}
+
+local cffreaders = {
+ readbyte,
+ readushort,
+ readuint,
+ readulong,
+}
+
+-- The header contains information about its own size.
+
+local function readheader(f)
+ local header = {
+ offset = f:seek("cur"),
+ major = readbyte(f),
+ minor = readbyte(f),
+ size = readbyte(f), -- headersize
+ osize = readbyte(f), -- for offsets to start
+ }
+ f:seek("set",header.offset+header.size)
+ return header
+end
+
+-- The indexes all look the same, so we share a loader. We could pass a handler
+-- and run over the array but why bother, we only have a few uses.
+
+local function readlengths(f)
+ local count = readushort(f)
+ if count == 0 then
+ return { }
+ end
+ local osize = readbyte(f)
+ local read = cffreaders[osize]
+ if not read then
+ report("bad offset size: %i",osize)
+ return { }
+ end
+ local lengths = { }
+ local previous = read(f)
+ for i=1,count do
+ local offset = read(f)
+ lengths[i] = offset - previous
+ previous = offset
+ end
+ return lengths
+end
+
+-- There can be subfonts so names is an array. However, in our case it's always
+-- one font. The same is true for the top dictionaries. Watch how we only load
+-- the dictionary string as for interpretation we need to have the strings loaded
+-- as well.
+
+local function readfontnames(f)
+ local names = readlengths(f)
+ for i=1,#names do
+ names[i] = readstring(f,names[i])
+ end
+ return names
+end
+
+local function readtopdictionaries(f)
+ local dictionaries = readlengths(f)
+ for i=1,#dictionaries do
+ dictionaries[i] = readstring(f,dictionaries[i])
+ end
+ return dictionaries
+end
+
+-- Strings are added to a list of standard strings so we start the font specific
+-- one with an offset. Strings are shared so we have one table.
+
+local function readstrings(f)
+ local lengths = readlengths(f)
+ local strings = setmetatableindex({ }, defaultstrings)
+ local index = #defaultstrings
+ for i=1,#lengths do
+ index = index + 1
+ strings[index] = readstring(f,lengths[i])
+ end
+ return strings
+end
+
+-- Parsing the dictionaries is delayed till we have the strings loaded. The parser
+-- is stack based so the operands come before the operator (like in postscript).
+
+-- local function delta(t)
+-- local n = #t
+-- if n > 1 then
+-- local p = t[1]
+-- for i=2,n do
+-- local c = t[i]
+-- t[i] = c + p
+-- p = c
+-- end
+-- end
+-- end
+
+do
+
+ -- We use a closure so that we don't need to pass too much around.
+
+ local stack = { }
+ local top = 0
+ local result = { }
+ local strings = { }
+
+ local p_single =
+ P("\00") / function()
+ result.version = strings[stack[top]] or "unset"
+ top = 0
+ end
+ + P("\01") / function()
+ result.notice = strings[stack[top]] or "unset"
+ top = 0
+ end
+ + P("\02") / function()
+ result.fullname = strings[stack[top]] or "unset"
+ top = 0
+ end
+ + P("\03") / function()
+ result.familyname = strings[stack[top]] or "unset"
+ top = 0
+ end
+ + P("\04") / function()
+ result.weight = strings[stack[top]] or "unset"
+ top = 0
+ end
+ + P("\05") / function()
+ result.fontbbox = { unpack(stack,1,4) }
+ top = 0
+ end
+ -- + P("\06") / function() end -- bluevalues
+ -- + P("\07") / function() end -- otherblues
+ -- + P("\08") / function() end -- familyblues
+ -- + P("\09") / function() end -- familyotherblues
+ -- + P("\10") / function() end -- strhw
+ -- + P("\11") / function() end -- stdvw
+ + P("\13") / function()
+ result.uniqueid = stack[top]
+ top = 0
+ end
+ + P("\14") / function()
+ result.xuid = concat(stack,"",1,top)
+ top = 0
+ end
+ + P("\15") / function()
+ result.charset = stack[top]
+ top = 0
+ end
+ + P("\16") / function()
+ result.encoding = stack[top]
+ top = 0
+ end
+ + P("\17") / function()
+ result.charstrings = stack[top]
+ top = 0
+ end
+ + P("\18") / function()
+ result.private = {
+ size = stack[top-1],
+ offset = stack[top],
+ }
+ top = 0
+ end
+ + P("\19") / function()
+ result.subroutines = stack[top]
+ end
+ + P("\20") / function()
+ result.defaultwidthx = stack[top]
+ end
+ + P("\21") / function()
+ result.nominalwidthx = stack[top]
+ end
+ -- + P("\22") / function() end -- reserved
+ -- + P("\23") / function() end -- reserved
+ -- + P("\24") / function() end -- reserved
+ -- + P("\25") / function() end -- reserved
+ -- + P("\26") / function() end -- reserved
+ -- + P("\27") / function() end -- reserved
+
+ local p_double = P("\12") * (
+ P("\00") / function()
+ result.copyright = stack[top]
+ top = 0
+ end
+ + P("\01") / function()
+ result.monospaced = stack[top] == 1 and true or false -- isfixedpitch
+ top = 0
+ end
+ + P("\02") / function()
+ result.italicangle = stack[top]
+ top = 0
+ end
+ + P("\03") / function()
+ result.underlineposition = stack[top]
+ top = 0
+ end
+ + P("\04") / function()
+ result.underlinethickness = stack[top]
+ top = 0
+ end
+ + P("\05") / function()
+ result.painttype = stack[top]
+ top = 0
+ end
+ + P("\06") / function()
+ result.charstringtype = stack[top]
+ top = 0
+ end
+ + P("\07") / function()
+ result.fontmatrix = { unpack(stack,1,6) }
+ top = 0
+ end
+ + P("\08") / function()
+ result.strokewidth = stack[top]
+ top = 0
+ end
+ + P("\20") / function()
+ result.syntheticbase = stack[top]
+ top = 0
+ end
+ + P("\21") / function()
+ result.postscript = strings[stack[top]] or "unset"
+ top = 0
+ end
+ + P("\22") / function()
+ result.basefontname = strings[stack[top]] or "unset"
+ top = 0
+ end
+ + P("\21") / function()
+ result.basefontblend = stack[top]
+ top = 0
+ end
+ + P("\30") / function()
+ result.cid.registry = strings[stack[top-2]] or "unset"
+ result.cid.ordering = strings[stack[top-1]] or "unset"
+ result.cid.supplement = stack[top]
+ top = 0
+ end
+ + P("\31") / function()
+ result.cid.fontversion = stack[top]
+ top = 0
+ end
+ + P("\32") / function()
+ result.cid.fontrevision= stack[top]
+ top = 0
+ end
+ + P("\33") / function()
+ result.cid.fonttype = stack[top]
+ top = 0
+ end
+ + P("\34") / function()
+ result.cid.count = stack[top]
+ top = 0
+ end
+ + P("\35") / function()
+ result.cid.uidbase = stack[top]
+ top = 0
+ end
+ + P("\36") / function()
+ result.cid.fdarray = stack[top]
+ top = 0
+ end
+ + P("\37") / function()
+ result.cid.fdselect = stack[top]
+ top = 0
+ end
+ + P("\38") / function()
+ result.cid.fontname = strings[stack[top]] or "unset"
+ top = 0
+ end
+ )
+
+ -- Some lpeg fun ... a first variant split the byte and made a new string but
+ -- the second variant is much faster. Not that it matters much as we don't see
+ -- such numbers often.
+
+ local p_last = P("\x0F") / "0" + P("\x1F") / "1" + P("\x2F") / "2" + P("\x3F") / "3"
+ + P("\x4F") / "4" + P("\x5F") / "5" + P("\x6F") / "6" + P("\x7F") / "7"
+ + P("\x8F") / "8" + P("\x9F") / "9" + P("\xAF") / "" + P("\xBF") / ""
+ + P("\xCF") / "" + P("\xDF") / "" + P("\xEF") / "" + R("\xF0\xFF") / ""
+
+ -- local remap = { [0] =
+ -- "00", "01", "02", "03", "04", "05", "06", "07", "08", "09", "0.", "0E", "0E-", "0", "0-", "0",
+ -- "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "0.", "0E", "0E-", "0", "0-", "0",
+ -- "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "0.", "0E", "0E-", "0", "0-", "0",
+ -- "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "0.", "0E", "0E-", "0", "0-", "0",
+ -- "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "0.", "0E", "0E-", "0", "0-", "0",
+ -- "50", "51", "52", "53", "54", "55", "56", "57", "58", "59", "0.", "0E", "0E-", "0", "0-", "0",
+ -- "60", "61", "62", "63", "64", "65", "66", "67", "68", "69", "0.", "0E", "0E-", "0", "0-", "0",
+ -- "70", "71", "72", "73", "74", "75", "76", "77", "78", "79", "0.", "0E", "0E-", "0", "0-", "0",
+ -- "80", "81", "82", "83", "84", "85", "86", "87", "88", "89", "0.", "0E", "0E-", "0", "0-", "0",
+ -- "90", "91", "92", "93", "94", "95", "96", "97", "98", "99", "0.", "0E", "0E-", "0", "0-", "0",
+ -- ".0", ".1", ".2", ".3", ".4", ".5", ".6", ".7", ".8", ".9", "..", ".E", ".E-", ".", ".-", ".",
+ -- "E0", "E1", "E2", "E3", "E4", "E5", "E6", "E7", "E8", "E9", "E.", "EE", "EE-", "E", "E-", "E",
+ -- "E-0", "E-1", "E-2", "E-3", "E-4", "E-5", "E-6", "E-7", "E-8", "E-9", "E-.", "E-E", "E-E-", "E-", "E--", "E-",
+ -- "-0", "-1", "-2", "-3", "-4", "-5", "-6", "-7", "-8", "-9", "-.", "-E", "-E-", "-", "--", "-",
+ -- }
+
+ -- local p_nibbles = Cs(((1-p_last)/byte/remap)^0+p_last)
+
+ -- local p = P("\30") * p_nibbles / function(t)
+ -- print(tonumber(t))
+ -- end
+
+ local remap = {
+ ["\x00"] = "00", ["\x01"] = "01", ["\x02"] = "02", ["\x03"] = "03", ["\x04"] = "04", ["\x05"] = "05", ["\x06"] = "06", ["\x07"] = "07", ["\x08"] = "08", ["\x09"] = "09", ["\x0A"] = "0.", ["\x0B"] = "0E", ["\x0C"] = "0E-", ["\x0D"] = "0", ["\x0E"] = "0-", ["\x0F"] = "0",
+ ["\x10"] = "10", ["\x11"] = "11", ["\x12"] = "12", ["\x13"] = "13", ["\x14"] = "14", ["\x15"] = "15", ["\x16"] = "16", ["\x17"] = "17", ["\x18"] = "18", ["\x19"] = "19", ["\x1A"] = "0.", ["\x1B"] = "0E", ["\x1C"] = "0E-", ["\x1D"] = "0", ["\x1E"] = "0-", ["\x1F"] = "0",
+ ["\x20"] = "20", ["\x21"] = "21", ["\x22"] = "22", ["\x23"] = "23", ["\x24"] = "24", ["\x25"] = "25", ["\x26"] = "26", ["\x27"] = "27", ["\x28"] = "28", ["\x29"] = "29", ["\x2A"] = "0.", ["\x2B"] = "0E", ["\x2C"] = "0E-", ["\x2D"] = "0", ["\x2E"] = "0-", ["\x2F"] = "0",
+ ["\x30"] = "30", ["\x31"] = "31", ["\x32"] = "32", ["\x33"] = "33", ["\x34"] = "34", ["\x35"] = "35", ["\x36"] = "36", ["\x37"] = "37", ["\x38"] = "38", ["\x39"] = "39", ["\x3A"] = "0.", ["\x3B"] = "0E", ["\x3C"] = "0E-", ["\x3D"] = "0", ["\x3E"] = "0-", ["\x3F"] = "0",
+ ["\x40"] = "40", ["\x41"] = "41", ["\x42"] = "42", ["\x43"] = "43", ["\x44"] = "44", ["\x45"] = "45", ["\x46"] = "46", ["\x47"] = "47", ["\x48"] = "48", ["\x49"] = "49", ["\x4A"] = "0.", ["\x4B"] = "0E", ["\x4C"] = "0E-", ["\x4D"] = "0", ["\x4E"] = "0-", ["\x4F"] = "0",
+ ["\x50"] = "50", ["\x51"] = "51", ["\x52"] = "52", ["\x53"] = "53", ["\x54"] = "54", ["\x55"] = "55", ["\x56"] = "56", ["\x57"] = "57", ["\x58"] = "58", ["\x59"] = "59", ["\x5A"] = "0.", ["\x5B"] = "0E", ["\x5C"] = "0E-", ["\x5D"] = "0", ["\x5E"] = "0-", ["\x5F"] = "0",
+ ["\x60"] = "60", ["\x61"] = "61", ["\x62"] = "62", ["\x63"] = "63", ["\x64"] = "64", ["\x65"] = "65", ["\x66"] = "66", ["\x67"] = "67", ["\x68"] = "68", ["\x69"] = "69", ["\x6A"] = "0.", ["\x6B"] = "0E", ["\x6C"] = "0E-", ["\x6D"] = "0", ["\x6E"] = "0-", ["\x6F"] = "0",
+ ["\x70"] = "70", ["\x71"] = "71", ["\x72"] = "72", ["\x73"] = "73", ["\x74"] = "74", ["\x75"] = "75", ["\x76"] = "76", ["\x77"] = "77", ["\x78"] = "78", ["\x79"] = "79", ["\x7A"] = "0.", ["\x7B"] = "0E", ["\x7C"] = "0E-", ["\x7D"] = "0", ["\x7E"] = "0-", ["\x7F"] = "0",
+ ["\x80"] = "80", ["\x81"] = "81", ["\x82"] = "82", ["\x83"] = "83", ["\x84"] = "84", ["\x85"] = "85", ["\x86"] = "86", ["\x87"] = "87", ["\x88"] = "88", ["\x89"] = "89", ["\x8A"] = "0.", ["\x8B"] = "0E", ["\x8C"] = "0E-", ["\x8D"] = "0", ["\x8E"] = "0-", ["\x8F"] = "0",
+ ["\x90"] = "90", ["\x91"] = "91", ["\x92"] = "92", ["\x93"] = "93", ["\x94"] = "94", ["\x95"] = "95", ["\x96"] = "96", ["\x97"] = "97", ["\x98"] = "98", ["\x99"] = "99", ["\x9A"] = "0.", ["\x9B"] = "0E", ["\x9C"] = "0E-", ["\x9D"] = "0", ["\x9E"] = "0-", ["\x9F"] = "0",
+ ["\xA0"] = ".0", ["\xA1"] = ".1", ["\xA2"] = ".2", ["\xA3"] = ".3", ["\xA4"] = ".4", ["\xA5"] = ".5", ["\xA6"] = ".6", ["\xA7"] = ".7", ["\xA8"] = ".8", ["\xA9"] = ".9", ["\xAA"] = "..", ["\xAB"] = ".E", ["\xAC"] = ".E-", ["\xAD"] = ".", ["\xAE"] = ".-", ["\xAF"] = ".",
+ ["\xB0"] = "E0", ["\xB1"] = "E1", ["\xB2"] = "E2", ["\xB3"] = "E3", ["\xB4"] = "E4", ["\xB5"] = "E5", ["\xB6"] = "E6", ["\xB7"] = "E7", ["\xB8"] = "E8", ["\xB9"] = "E9", ["\xBA"] = "E.", ["\xBB"] = "EE", ["\xBC"] = "EE-", ["\xBD"] = "E", ["\xBE"] = "E-", ["\xBF"] = "E",
+ ["\xC0"] = "E-0", ["\xC1"] = "E-1", ["\xC2"] = "E-2", ["\xC3"] = "E-3", ["\xC4"] = "E-4", ["\xC5"] = "E-5", ["\xC6"] = "E-6", ["\xC7"] = "E-7", ["\xC8"] = "E-8", ["\xC9"] = "E-9", ["\xCA"] = "E-.", ["\xCB"] = "E-E", ["\xCC"] = "E-E-", ["\xCD"] = "E-", ["\xCE"] = "E--", ["\xCF"] = "E-",
+ ["\xD0"] = "-0", ["\xD1"] = "-1", ["\xD2"] = "-2", ["\xD3"] = "-3", ["\xD4"] = "-4", ["\xD5"] = "-5", ["\xD6"] = "-6", ["\xD7"] = "-7", ["\xD8"] = "-8", ["\xD9"] = "-9", ["\xDA"] = "-.", ["\xDB"] = "-E", ["\xDC"] = "-E-", ["\xDD"] = "-", ["\xDE"] = "--", ["\xDF"] = "-",
+ }
+
+ local p_nibbles = P("\30") * Cs(((1-p_last)/remap)^0+p_last) / function(n)
+ -- 0-9=digit a=. b=E c=E- d=reserved e=- f=finish
+ top = top + 1
+ stack[top] = tonumber(n) or 0
+ end
+
+ local p_byte = C(R("\32\246")) / function(b0)
+ -- -107 .. +107
+ top = top + 1
+ stack[top] = byte(b0) - 139
+ end
+
+ local p_positive = C(R("\247\250")) * C(1) / function(b0,b1)
+ -- +108 .. +1131
+ top = top + 1
+ stack[top] = (byte(b0)-247)*256 + byte(b1) + 108
+ end
+
+ local p_negative = C(R("\251\254")) * C(1) / function(b0,b1)
+ -- -1131 .. -108
+ top = top + 1
+ stack[top] = -(byte(b0)-251)*256 - byte(b1) - 108
+ end
+
+ local p_short = P("\28") * C(1) * C(1) / function(b1,b2)
+ -- -32768 .. +32767 : b1<<8 | b2
+ top = top + 1
+ local n = 0x100 * byte(b1) + byte(b2)
+ if n >= 0x8000 then
+ stack[top] = n - 0xFFFF - 1
+ else
+ stack[top] = n
+ end
+ end
+
+ local p_long = P("\29") * C(1) * C(1) * C(1) * C(1) / function(b1,b2,b3,b4)
+ -- -2^31 .. +2^31-1 : b1<<24 | b2<<16 | b3<<8 | b4
+ top = top + 1
+ local n = 0x1000000 * byte(b1) + 0x10000 * byte(b2) + 0x100 * byte(b3) + byte(b4)
+ if n >= 0x8000000 then
+ stack[top] = n - 0xFFFFFFFF - 1
+ else
+ stack[top] = n
+ end
+ end
+
+ local p_unsupported = P(1) / function(detail)
+ top = 0
+ end
+
+ local p_dictionary = (
+ p_byte
+ + p_positive
+ + p_negative
+ + p_short
+ + p_long
+ + p_nibbles
+ + p_single
+ + p_double
+ + p_unsupported
+ )^1
+
+ parsetopdictionary = function(data)
+ local dictionaries = data.dictionaries
+ stack = { }
+ strings = data.strings
+ for i=1,#dictionaries do
+ top = 0
+ result = {
+ monospaced = false,
+ italicangle = 0,
+ underlineposition = -100,
+ underlinethickness = 50,
+ painttype = 0,
+ charstringtype = 2,
+ fontmatrix = { 0.001, 0, 0, 0.001, 0, 0 },
+ fontbbox = { 0, 0, 0, 0 },
+ strokewidth = 0,
+ charset = 0,
+ encoding = 0,
+ cid = {
+ fontversion = 0,
+ fontrevision = 0,
+ fonttype = 0,
+ count = 8720,
+ }
+ }
+ lpegmatch(p_dictionary,dictionaries[i])
+ dictionaries[i] = result
+ end
+ result = { }
+ top = 0
+ stack = { }
+ end
+
+ parseprivates = function(data)
+ local dictionaries = data.dictionaries
+ stack = { }
+ strings = data.strings
+ for i=1,#dictionaries do
+ local private = dictionaries[i].private
+ if private.data then
+ top = 0
+ result = {
+ forcebold = false,
+ languagegroup = 0,
+ expansionfactor = 0.06,
+ initialrandomseed = 0,
+ subroutines = 0,
+ defaultwidthx = 0,
+ nominalwidthx = 0,
+ }
+ lpegmatch(p_dictionary,private.data)
+ private.data = result
+ end
+ end
+ result = { }
+ top = 0
+ stack = { }
+ end
+
+ -- All bezier curves have 6 points with successive pairs relative to
+ -- the previous pair. Some can be left out and are then copied or zero
+ -- (optimization).
+ --
+ -- We are not really interested in all the details of a glyph because we
+ -- only need to calculate the boundingbox. So, todo: a quick no result but
+ -- calculate only variant.
+ --
+    -- The conversion is straightforward and the specification is clear once
+    -- you understand that x and y need to be updated at each step. It's also
+    -- quite easy to test because in mp a shape will look bad when a few variables
+    -- are swapped. But still there might be bugs down here because not all
+    -- variants are seen in a font so far. We are less compact than the ff code
+    -- because there quite a few variants are done in one helper with a lot of
+    -- testing for states.
+
+ local x = 0
+ local y = 0
+ local width = false
+ local r = 0
+ local stems = 0
+ local globalbias = 0
+ local localbias = 0
+ local globals = false
+ local locals = false
+ local depth = 1
+
+ local function showstate(where)
+ report("%w%-10s : [%s] n=%i",depth*2,where,concat(stack," ",1,top),top)
+ end
+
+ local function showvalue(where,value,showstack)
+ if showstack then
+ report("%w%-10s : %s : [%s] n=%i",depth*2,where,tostring(value),concat(stack," ",1,top),top)
+ else
+ report("%w%-10s : %s",depth*2,where,tostring(value))
+ end
+ end
+
+ local function rmoveto()
+ if top > 2 then
+ if not width then
+ width = stack[1]
+ if trace_charstrings then
+ showvalue("width",width)
+ end
+ end
+ top = top - 1
+ elseif not width then
+ width = true
+ end
+ if trace_charstrings then
+ showstate("rmoveto")
+ end
+ x = x + stack[top-1] -- dx1
+ y = y + stack[top] -- dy1
+ top = 0
+ r = r + 1
+ result[r] = { x, y, "m" } -- "moveto"
+ end
+
+ local function hmoveto()
+ if top > 1 then
+ if not width then
+ width = stack[1]
+ if trace_charstrings then
+ showvalue("width",width)
+ end
+ end
+ top = top - 1
+ elseif not width then
+ width = true
+ end
+ if trace_charstrings then
+ showstate("hmoveto")
+ end
+ x = x + stack[top] -- dx1
+ top = 0
+ r = r + 1
+ result[r] = { x, y, "m" } -- "moveto"
+ end
+
+ local function vmoveto()
+ if top > 1 then
+ if not width then
+ width = stack[1]
+ if trace_charstrings then
+ showvalue("width",width)
+ end
+ end
+ top = top - 1
+ elseif not width then
+ width = true
+ end
+ if trace_charstrings then
+ showstate("vmoveto")
+ end
+ y = y + stack[top] -- dy1
+ top = 0
+ r = r + 1
+ result[r] = { x, y, "m" } -- "moveto"
+ end
+
+ local function rlineto()
+ if trace_charstrings then
+ showstate("rlineto")
+ end
+ for i=1,top,2 do
+ x = x + stack[i] -- dxa
+ y = y + stack[i+1] -- dya
+ r = r + 1
+ result[r] = { x, y, "l" } -- "lineto"
+ end
+ top = 0
+ end
+
+ local function xlineto(swap) -- x (y,x)+ | (x,y)+
+ for i=1,top do
+ if swap then
+ x = x + stack[i]
+ swap = false
+ else
+ y = y + stack[i]
+ swap = true
+ end
+ r = r + 1
+ result[r] = { x, y, "l" } -- "lineto"
+ end
+ top = 0
+ end
+
+ local function hlineto() -- x (y,x)+ | (x,y)+
+ if trace_charstrings then
+ showstate("hlineto")
+ end
+ xlineto(true)
+ end
+
+ local function vlineto() -- y (x,y)+ | (y,x)+
+ if trace_charstrings then
+ showstate("vlineto")
+ end
+ xlineto(false)
+ end
+
+ local function rrcurveto()
+ if trace_charstrings then
+ showstate("rrcurveto")
+ end
+ for i=1,top,6 do
+ local ax = x + stack[i] -- dxa
+ local ay = y + stack[i+1] -- dya
+ local bx = ax + stack[i+2] -- dxb
+ local by = ay + stack[i+3] -- dyb
+ x = bx + stack[i+4] -- dxc
+ y = by + stack[i+5] -- dyc
+ r = r + 1
+ result[r] = { ax, ay, bx, by, x, y, "c" } -- "curveto"
+ end
+ top = 0
+ end
+
+ local function hhcurveto()
+ if trace_charstrings then
+ showstate("hhcurveto")
+ end
+ local s = 1
+ if top % 2 ~= 0 then
+ y = y + stack[1] -- dy1
+ s = 2
+ end
+ for i=s,top,4 do
+ local ax = x + stack[i] -- dxa
+ local ay = y
+ local bx = ax + stack[i+1] -- dxb
+ local by = ay + stack[i+2] -- dyb
+ x = bx + stack[i+3] -- dxc
+ y = by
+ r = r + 1
+ result[r] = { ax, ay, bx, by, x, y, "c" } -- "curveto"
+ end
+ top = 0
+ end
+
+ local function vvcurveto()
+ if trace_charstrings then
+ showstate("vvcurveto")
+ end
+ local s = 1
+ local d = 0
+ if top % 2 ~= 0 then
+ d = stack[1] -- dx1
+ s = 2
+ end
+ for i=s,top,4 do
+ local ax = x + d
+ local ay = y + stack[i] -- dya
+ local bx = ax + stack[i+1] -- dxb
+ local by = ay + stack[i+2] -- dyb
+ x = bx
+ y = by + stack[i+3] -- dyc
+ r = r + 1
+ result[r] = { ax, ay, bx, by, x, y, "c" } -- "curveto"
+ d = 0
+ end
+ top = 0
+ end
+
+ local function xxcurveto(swap)
+ local last = top % 4 ~= 0 and stack[top]
+ if last then
+ top = top - 1
+ end
+ local sw = swap
+ for i=1,top,4 do
+ local ax, ay, bx, by
+ if swap then
+ ax = x + stack[i]
+ ay = y
+ bx = ax + stack[i+1]
+ by = ay + stack[i+2]
+ y = by + stack[i+3]
+ if last and i+3 == top then
+ x = bx + last
+ else
+ x = bx
+ end
+ swap = false
+ else
+ ax = x
+ ay = y + stack[i]
+ bx = ax + stack[i+1]
+ by = ay + stack[i+2]
+ x = bx + stack[i+3]
+ if last and i+3 == top then
+ y = by + last
+ else
+ y = by
+ end
+ swap = true
+ end
+ r = r + 1
+ result[r] = { ax, ay, bx, by, x, y, "c" } -- "curveto"
+ end
+ top = 0
+ end
+
+ local function hvcurveto()
+ if trace_charstrings then
+ showstate("hvcurveto")
+ end
+ xxcurveto(true)
+ end
+
+ local function vhcurveto()
+ if trace_charstrings then
+ showstate("vhcurveto")
+ end
+ xxcurveto(false)
+ end
+
+ local function rcurveline()
+ if trace_charstrings then
+ showstate("rcurveline")
+ end
+ for i=1,top-2,6 do
+ local ax = x + stack[i] -- dxa
+ local ay = y + stack[i+1] -- dya
+ local bx = ax + stack[i+2] -- dxb
+ local by = ay + stack[i+3] -- dyb
+ x = bx + stack[i+4] -- dxc
+ y = by + stack[i+5] -- dyc
+ r = r + 1
+ result[r] = { ax, ay, bx, by, x, y, "c" } -- "curveto"
+ end
+ x = x + stack[top-1] -- dxc
+ y = y + stack[top] -- dyc
+ r = r + 1
+ result[r] = { x, y, "l" } -- "lineto"
+ top = 0
+ end
+
+ local function rlinecurve()
+ if trace_charstrings then
+ showstate("rlinecurve")
+ end
+ if top > 6 then
+ for i=1,top-6,2 do
+ x = x + stack[i]
+ y = y + stack[i+1]
+ r = r + 1
+ result[r] = { x, y, "l" } -- "lineto"
+ end
+ end
+ local ax = x + stack[top-5]
+ local ay = y + stack[top-4]
+ local bx = ax + stack[top-3]
+ local by = ay + stack[top-2]
+ x = bx + stack[top-1]
+ y = by + stack[top]
+ r = r + 1
+ result[r] = { ax, ay, bx, by, x, y, "c" } -- "curveto"
+ top = 0
+ end
+
+ local function flex() -- fd not used
+ if trace_charstrings then
+ showstate("flex")
+ end
+ local ax = x + stack[i] -- dx1
+ local ay = y + stack[i+1] -- dy1
+ local bx = ax + stack[i+2] -- dx2
+ local by = ay + stack[i+3] -- dy2
+ local cx = bx + stack[i+4] -- dx3
+ local cy = by + stack[i+5] -- dy3
+ r = r + 1
+ result[r] = { ax, ay, bx, by, cx, cy, "c" } -- "curveto"
+ local dx = cx + stack[i+6] -- dx4
+ local dy = cy + stack[i+7] -- dy4
+ local ex = dx + stack[i+8] -- dx5
+ local ey = dy + stack[i+9] -- dy5
+ x = ex + stack[i+10] -- dx6
+ y = ey + stack[i+11] -- dy6
+ r = r + 1
+ result[r] = { dx, dy, ex, ey, x, y, "c" } -- "curveto"
+ top = 0
+ end
+
+ local function hflex()
+ if trace_charstrings then
+ showstate("hflex")
+ end
+ local ax = x + stack[i ] -- dx1
+ local ay = y
+ local bx = ax + stack[i+1] -- dx2
+ local by = ay + stack[i+2] -- dy2
+ local cx = bx + stack[i+3] -- dx3
+ local cy = by
+ r = r + 1
+ result[r] = { ax, ay, bx, by, cx, cy, "c" } -- "curveto"
+ local dx = cx + stack[i+4] -- dx4
+ local dy = by
+ local ex = dx + stack[i+5] -- dx5
+ local ey = y
+ x = ex + stack[i+6] -- dx6
+ r = r + 1
+ result[r] = { dx, dy, ex, ey, x, y, "c" } -- "curveto"
+ top = 0
+ end
+
+ local function hflex1()
+ if trace_charstrings then
+ showstate("hflex1")
+ end
+ local ax = x + stack[i ] -- dx1
+ local ay = y + stack[i+1] -- dy1
+ local bx = ax + stack[i+2] -- dx2
+ local by = ay + stack[i+3] -- dy2
+ local cx = bx + stack[i+4] -- dx3
+ local cy = by
+ r = r + 1
+ result[r] = { ax, ay, bx, by, cx, cy, "c" } -- "curveto"
+ local dx = cx + stack[i+5] -- dx4
+ local dy = by
+ local ex = dx + stack[i+7] -- dx5
+ local ey = dy + stack[i+8] -- dy5
+ x = ex + stack[i+9] -- dx6
+ r = r + 1
+ result[r] = { dx, dy, dx, dy, x, y, "c" } -- "curveto"
+ top = 0
+ end
+
+ local function flex1()
+ if trace_charstrings then
+ showstate("flex1")
+ end
+ local ax = x + stack[i ] --dx1
+ local ay = y + stack[i+1] --dy1
+ local bx = ax + stack[i+2] --dx2
+ local by = ay + stack[i+3] --dy2
+ local cx = bx + stack[i+4] --dx3
+ local cy = by + stack[i+5] --dy3
+ r = r + 1
+ result[r] = { ax, ay, bx, by, cx, cy, "c" } -- "curveto"
+ local dx = cx + stack[i+6] --dx4
+ local dy = cy + stack[i+7] --dy4
+ local ex = dx + stack[i+8] --dx5
+ local ey = dy + stack[i+9] --dy5
+ if abs(ex - x) > abs(ey - y) then -- spec: abs(dx) > abs(dy)
+ x = ex + stack[i+10]
+ else
+ y = ey + stack[i+10]
+ end
+ r = r + 1
+ result[r] = { dx, dy, dx, dy, x, y, "c" } -- "curveto"
+ top = 0
+ end
+
+ local function getstem()
+ if top % 2 ~= 0 then
+ if width then
+ remove(stack,1)
+ else
+ width = remove(stack,1)
+ end
+ top = top - 1
+ if trace_charstrings then
+ showvalue("width",width)
+ end
+ end
+ if trace_charstrings then
+ showstate("stem")
+ end
+ stems = stems + top/2
+ top = 0
+ end
+
+ local function getmask()
+ if top % 2 ~= 0 then
+ if width then
+ remove(stack,1)
+ else
+ width = remove(stack,1)
+ end
+ top = top - 1
+ if trace_charstrings then
+ showvalue("width",width)
+ end
+ end
+ if trace_charstrings then
+ showstate(operator == 19 and "hintmark" or "cntrmask")
+ end
+ stems = stems + top/2
+ top = 0
+ if stems <= 8 then
+ return 1
+ else
+ return floor((stems+7)/8)
+ end
+ end
+
+ local function unsupported()
+ if trace_charstrings then
+ showstate("unsupported")
+ end
+ top = 0
+ end
+
+ -- Bah, we cannot use a fast lpeg because a hint has an unknown size and a
+ -- runtime capture cannot handle that well.
+
+ local actions = { [0] =
+ unsupported, -- 0
+ getstem, -- 1 -- hstem
+ unsupported, -- 2
+ getstem, -- 3 -- vstem
+ vmoveto, -- 4
+ rlineto, -- 5
+ hlineto, -- 6
+ vlineto, -- 7
+ rrcurveto, -- 8
+ unsupported, -- 9 -- closepath
+ unsupported, -- 10 -- calllocal,
+ unsupported, -- 11 -- callreturn,
+ unsupported, -- 12 -- elsewhere
+ unsupported, -- 13 -- hsbw
+ unsupported, -- 14 -- endchar,
+ unsupported, -- 15
+ unsupported, -- 16
+ unsupported, -- 17
+ getstem, -- 18 -- hstemhm
+ getmask, -- 19 -- hintmask
+ getmask, -- 20 -- cntrmask
+ rmoveto, -- 21
+ hmoveto, -- 22
+ getstem, -- 23 -- vstemhm
+ rcurveline, -- 24
+ rlinecurve, -- 25
+ vvcurveto, -- 26
+ hhcurveto, -- 27
+ unsupported, -- 28 -- elsewhere
+ unsupported, -- 29 -- elsewhere
+ vhcurveto, -- 30
+ hvcurveto, -- 31
+ }
+
+ local subactions = {
+ [034] = hflex,
+ [035] = flex,
+ [036] = hflex1,
+ [037] = flex1,
+ }
+
+ local p_bytes = Ct((P(1)/byte)^0)
+
+ local function call(scope,list,bias,process)
+ local index = stack[top] + bias
+ top = top - 1
+ if trace_charstrings then
+ showvalue(scope,index,true)
+ end
+ local str = list[index]
+ if str then
+ if type(str) == "string" then
+ str = lpegmatch(p_bytes,str)
+ list[index] = str
+ end
+ depth = depth + 1
+ process(str)
+ depth = depth - 1
+ else
+ report("unknown %s %i",scope,index)
+ end
+ end
+
+ local function process(tab) -- I should profile this and optimize the order
+ local i = 1 -- which is something for a cold dark evening.
+ local n = #tab
+ while i <= n do
+ local t = tab[i]
+ if t >= 32 and t<=246 then
+ -- -107 .. +107
+ top = top + 1
+ stack[top] = t - 139
+ i = i + 1
+ elseif t >= 247 and t <= 250 then
+ -- +108 .. +1131
+ top = top + 1
+ stack[top] = (t-247)*256 + tab[i+1] + 108
+ i = i + 2
+ elseif t >= 251 and t <= 254 then
+ -- -1131 .. -108
+ top = top + 1
+ stack[top] = -(t-251)*256 - tab[i+1] - 108
+ i = i + 2
+ elseif t == 28 then
+ -- -32768 .. +32767 : b1<<8 | b2
+ top = top + 1
+ local n = 0x100 * tab[i+1] + tab[i+2]
+ if n >= 0x8000 then
+ stack[top] = n - 0xFFFF - 1
+ else
+ stack[top] = n
+ end
+ i = i + 3
+ elseif t == 255 then
+ local n = 0x100 * tab[i+1] + tab[i+2]
+ top = top + 1
+ if n >= 0x8000 then
+ stack[top] = n - 0xFFFF - 1 + (0x100 * tab[i+3] + tab[i+4])/0xFFFF
+ else
+ stack[top] = n + (0x100 * tab[i+3] + tab[i+4])/0xFFFF
+ end
+ i = i + 5
+ elseif t == 12 then
+ i = i + 1
+ local t = tab[i]
+ local a = subactions[t]
+ if a then
+ a()
+ else
+ if trace_charstrings then
+ showvalue("<subaction>",t)
+ end
+ top = 0
+ end
+ i = i + 1
+ elseif t == 14 then -- endchar
+ if width then
+ -- okay
+ elseif top > 0 then
+ width = stack[1]
+ if trace_charstrings then
+ showvalue("width",width)
+ end
+ else
+ width = true
+ end
+ if trace_charstrings then
+ showstate("endchar")
+ end
+ return
+ elseif t == 11 then
+ if trace_charstrings then
+ showstate("return")
+ end
+ return
+ elseif t == 10 then
+ call("local",locals,localbias,process)
+ i = i + 1
+ elseif t == 29 then
+ call("global",globals,globalbias,process)
+ i = i + 1
+ else
+ local a = actions[t]
+ if a then
+ local s = a()
+ if s then
+ i = i + s
+ end
+ else
+ if trace_charstrings then
+ showvalue("<action>",t)
+ end
+ top = 0
+ end
+ i = i + 1
+ end
+ end
+ end
+
+ local function calculatebounds(segments,x,y)
+ local nofsegments = #segments
+ if nofsegments == 0 then
+ return { x, y, x, y }
+ else
+ local xmin = 10000
+ local xmax = -10000
+ local ymin = 10000
+ local ymax = -10000
+ if x < xmin then xmin = x end
+ if x > xmax then xmax = x end
+ if y < ymin then ymin = y end
+ if y > ymax then ymax = y end
+            -- we now have a reasonable start so we could
+            -- simplify the next checks
+ for i=1,nofsegments do
+ local s = segments[i]
+ local x = s[1]
+ local y = s[2]
+ if x < xmin then xmin = x end
+ if x > xmax then xmax = x end
+ if y < ymin then ymin = y end
+ if y > ymax then ymax = y end
+ if s[#s] == "c" then -- "curveto"
+ local x = s[3]
+ local y = s[4]
+ if x < xmin then xmin = x elseif x > xmax then xmax = x end
+ if y < ymin then ymin = y elseif y > ymax then ymax = y end
+ local x = s[5]
+ local y = s[6]
+ if x < xmin then xmin = x elseif x > xmax then xmax = x end
+ if y < ymin then ymin = y elseif y > ymax then ymax = y end
+ end
+ end
+ return { round(xmin), round(ymin), round(xmax), round(ymax) } -- doesn't make ceil more sense
+ end
+ end
+
+ parsecharstrings = function(data,glyphs,doshapes)
+ -- for all charstrings
+ local dictionary = data.dictionaries[1]
+ local charstrings = data.charstrings
+ local charset = data.charset
+ stack = { }
+ glyphs = glyphs or { }
+ strings = data.strings
+ locals = dictionary.subroutines
+ globals = data.routines
+ globalbias = #globals
+ localbias = #locals
+ globalbias = ((globalbias < 1240 and 107) or (globalbias < 33900 and 1131) or 32768) + 1
+ localbias = ((localbias < 1240 and 107) or (localbias < 33900 and 1131) or 32768) + 1
+ local nominalwidth = dictionary.private.data.nominalwidthx or 0
+ local defaultwidth = dictionary.private.data.defaultwidthx or 0
+
+ for i=1,#charstrings do
+ local str = charstrings[i]
+ local tab = lpegmatch(p_bytes,str)
+ local index = i - 1
+ x = 0
+ y = 0
+ width = false
+ r = 0
+ top = 0
+ stems = 0
+ result = { }
+ if trace_charstrings then
+ report("glyph: %i",index)
+ report("data: % t",tab)
+ end
+ --
+ process(tab)
+ --
+ local boundingbox = calculatebounds(result,x,y)
+ --
+ if width == true or width == false then
+ width = defaultwidth
+ else
+ width = nominalwidth + width
+ end
+ --
+ local glyph = glyphs[index] -- can be autodefined in otr
+ if not glyph then
+ glyphs[index] = {
+ segments = doshapes ~= false and result, -- optional
+ boundingbox = boundingbox,
+ width = width,
+ name = charset[index],
+ -- sidebearing = 0,
+ }
+ else
+ glyph.segments = doshapes ~= false and result
+ glyph.boundingbox = boundingbox
+ if not glyph.width then
+ glyph.width = width
+ end
+ if charset and not glyph.name then
+ glyph.name = charset[index]
+ end
+ -- glyph.sidebearing = 0 -- todo
+ end
+ if trace_charstrings then
+ report("width: %s",tostring(width))
+ report("boundingbox: % t",boundingbox)
+ end
+ charstrings[i] = nil -- free memory
+ end
+ result = { }
+ top = 0
+ stack = { }
+ return glyphs
+ end
+
+end
+
+local function readglobals(f,data)
+ local routines = readlengths(f)
+ for i=1,#routines do
+ routines[i] = readstring(f,routines[i])
+ end
+ data.routines = routines
+end
+
+local function readencodings(f,data)
+ data.encodings = { }
+end
+
+local function readcharsets(f,data)
+ local header = data.header
+ local dictionaries = data.dictionaries
+ local strings = data.strings
+ f:seek("set",header.offset+dictionaries[1].charset)
+ local format = readbyte(f)
+ if format == 0 then
+ local charset = { [0] = ".notdef" }
+ for i=1,data.nofglyphs do
+ charset[i] = strings[readushort(f)]
+ end
+ data.charset = charset
+ elseif format == 1 then
+ report("cff parser: todo charset format %a",format)
+ elseif format == 2 then
+ report("cff parser: todo charset format %a",format)
+ else
+ report("cff parser: unsupported charset format %a",format)
+ end
+end
+
+local function readfdselect(f,data)
+end
+
+local function readprivates(f,data)
+ local header = data.header
+ local dictionaries = data.dictionaries
+ local private = dictionaries[1].private
+ if private then
+ f:seek("set",header.offset+private.offset)
+ private.data = readstring(f,private.size)
+ end
+end
+
+local function readlocals(f,data)
+ -- todo: make them local indeed
+ local header = data.header
+ local dictionaries = data.dictionaries
+ local dictionary = dictionaries[1]
+ local private = dictionary.private
+ if private then
+ f:seek("set",header.offset+private.offset+private.data.subroutines)
+ local subroutines = readlengths(f)
+ for i=1,#subroutines do
+ subroutines[i] = readstring(f,subroutines[i])
+ end
+ dictionary.subroutines = subroutines
+ private.data.subroutines = nil
+ end
+end
+
+-- These charstrings are little programs, described in Technical Note #5177. A truetype
+-- font has only one dictionary.
+
+local function readcharstrings(f,data)
+ local header = data.header
+ local dictionaries = data.dictionaries
+ local dictionary = dictionaries[1]
+ local type = dictionary.charstringtype
+ if type == 2 then
+ f:seek("set",header.offset+dictionary.charstrings)
+ -- could be a metatable .. delayed loading
+ local charstrings = readlengths(f)
+ local nofglyphs = #charstrings
+ for i=1,nofglyphs do
+ charstrings[i] = readstring(f,charstrings[i])
+ end
+ data.nofglyphs = nofglyphs
+ data.charstrings = charstrings
+ else
+ report("unsupported charstr type %i",type)
+ data.nofglyphs = 0
+ data.charstrings = { }
+ end
+end
+
+function fonts.handlers.otf.readers.cff(f,fontdata,specification)
+-- if specification.glyphs then
+ if specification.details then
+ local datatable = fontdata.tables.cff
+ if datatable then
+ local offset = datatable.offset
+ local glyphs = fontdata.glyphs
+ if not f then
+ report("invalid filehandle")
+ return
+ end
+ if offset then
+ f:seek("set",offset)
+ end
+ local header = readheader(f)
+ if header.major > 1 then
+ report("version mismatch")
+ return
+ end
+ local names = readfontnames(f)
+ local dictionaries = readtopdictionaries(f)
+ local strings = readstrings(f)
+ local data = {
+ header = header,
+ names = names,
+ dictionaries = dictionaries,
+ strings = strings,
+ }
+ --
+ parsetopdictionary(data)
+ --
+ local d = dictionaries[1]
+ fontdata.cffinfo = {
+ familyname = d.familyname,
+ fullname = d.fullname,
+ boundingbox = d.boundingbox,
+ weight = d.weight,
+ italicangle = d.italicangle,
+ underlineposition = d.underlineposition,
+ underlinethickness = d.underlinethickness,
+ monospaced = d.monospaced,
+ }
+ --
+ if specification.glyphs then
+ readglobals(f,data)
+ readcharstrings(f,data)
+ readencodings(f,data)
+ readcharsets(f,data)
+ readfdselect(f,data)
+ --
+ readprivates(f,data)
+ parseprivates(data)
+ readlocals(f,data)
+ --
+ parsecharstrings(data,glyphs,specification.shapes or false)
+ end
+ --
+ -- cleanup (probably more can go)
+ --
+ -- for i=1,#dictionaries do
+ -- local d = dictionaries[i]
+ -- d.subroutines = nil
+ -- end
+ -- data.strings = nil
+ -- if data then
+ -- data.charstrings = nil
+ -- data.routines = nil
+ -- end
+ end
+ end
+end
diff --git a/tex/context/base/font-con.lua b/tex/context/base/font-con.lua
index 72fbb5c0d..137c21e48 100644
--- a/tex/context/base/font-con.lua
+++ b/tex/context/base/font-con.lua
@@ -426,6 +426,7 @@ function constructors.scale(tfmdata,specification)
local vdelta = delta
--
target.designsize = parameters.designsize -- not really needed so it might become obsolete
+ target.units = units
target.units_per_em = units -- just a trigger for the backend
--
local direction = properties.direction or tfmdata.direction or 0 -- pointless, as we don't use omf fonts at all
@@ -895,12 +896,21 @@ function constructors.finalize(tfmdata)
parameters.slantfactor = tfmdata.slant or 0
end
--
- if not parameters.designsize then
- parameters.designsize = tfmdata.designsize or (factors.pt * 10)
+ local designsize = parameters.designsize
+ if designsize then
+ parameters.minsize = tfmdata.minsize or designsize
+ parameters.maxsize = tfmdata.maxsize or designsize
+ else
+ designsize = factors.pt * 10
+ parameters.designsize = designsize
+ parameters.minsize = designsize
+ parameters.maxsize = designsize
end
+ parameters.minsize = tfmdata.minsize or parameters.designsize
+ parameters.maxsize = tfmdata.maxsize or parameters.designsize
--
if not parameters.units then
- parameters.units = tfmdata.units_per_em or 1000
+ parameters.units = tfmdata.units or tfmdata.units_per_em or 1000
end
--
if not tfmdata.descriptions then
@@ -976,6 +986,7 @@ function constructors.finalize(tfmdata)
tfmdata.auto_protrude = nil
tfmdata.extend = nil
tfmdata.slant = nil
+ tfmdata.units = nil
tfmdata.units_per_em = nil
--
tfmdata.cache = nil
diff --git a/tex/context/base/font-lib.mkvi b/tex/context/base/font-lib.mkvi
index 9cc14e02f..b1050f7f5 100644
--- a/tex/context/base/font-lib.mkvi
+++ b/tex/context/base/font-lib.mkvi
@@ -22,6 +22,16 @@
\registerctxluafile{font-agl}{1.001} % if needed we can comment this and delay loading
\registerctxluafile{font-cid}{1.001} % cid maps
\registerctxluafile{font-map}{1.001}
+
+% the otf font loader:
+
+\registerctxluafile{font-otr}{1.001} % opentype fontloader
+\registerctxluafile{font-cff}{1.001} % cubic outlines
+\registerctxluafile{font-ttf}{1.001} % quadratic outlines
+\registerctxluafile{font-tmp}{1.001} % temporary placeholder
+%registerctxluafile{font-dsp}{1.001} % ... for this one
+\registerctxluafile{font-off}{1.001} % the old loader
+
\registerctxluafile{font-syn}{1.001}
\registerctxluafile{font-tfm}{1.001}
@@ -58,6 +68,8 @@
\registerctxluafile{node-fnt}{1.001} % here
+\registerctxluafile{font-mps}{1.001} % outline fun
+
\registerctxluafile{font-lua}{1.001}
\registerctxluafile{font-vf} {1.001}
diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua
index 5169dd4e1..9fe6e224f 100644
--- a/tex/context/base/font-mis.lua
+++ b/tex/context/base/font-mis.lua
@@ -22,7 +22,7 @@ local handlers = fonts.handlers
handlers.otf = handlers.otf or { }
local otf = handlers.otf
-otf.version = otf.version or 2.814
+otf.version = otf.version or 2.815
otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true)
local fontloader = fontloader
diff --git a/tex/context/base/font-mps.lua b/tex/context/base/font-mps.lua
new file mode 100644
index 000000000..1465b475b
--- /dev/null
+++ b/tex/context/base/font-mps.lua
@@ -0,0 +1,379 @@
+if not modules then modules = { } end modules ['font-mps'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local concat = table.concat
+local formatters = string.formatters
+
+-- QP0 [QP1] QP2 => CP0 [CP1 CP2] CP3
+
+-- CP0 = QP0
+-- CP3 = QP2
+--
+-- CP1 = QP0 + 2/3 *(QP1-QP0)
+-- CP2 = QP2 + 2/3 *(QP1-QP2)
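+--
+-- as an illustrative sketch (not used below), one quadratic segment with on-curve
+-- points (0,0) and (100,0) and control point (50,80) maps onto these cubic controls:
+--
+-- local l_x, l_y = 0, 0 -- QP0
+-- local m_x, m_y = 50, 80 -- QP1
+-- local r_x, r_y = 100, 0 -- QP2
+-- local c1_x, c1_y = l_x + 2/3 * (m_x - l_x), l_y + 2/3 * (m_y - l_y) -- CP1 = (33.33,53.33)
+-- local c2_x, c2_y = r_x + 2/3 * (m_x - r_x), r_y + 2/3 * (m_y - r_y) -- CP2 = (66.67,53.33)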
+
+fonts = fonts or { }
+local metapost = fonts.metapost or { }
+fonts.metapost = metapost
+
+local f_moveto = formatters["(%.4G,%.4G)"]
+local f_lineto = formatters["--(%.4G,%.4G)"]
+local f_curveto = formatters["..controls(%.4G,%.4G)and(%.4G,%.4G)..(%.4G,%.4G)"]
+local s_cycle = "--cycle"
+
+local f_nofill = formatters["nofill %s;"]
+local f_dofill = formatters["fill %s;"]
+
+local f_draw_trace = formatters["drawpathonly %s;"]
+local f_draw = formatters["draw %s;"]
+
+local f_boundingbox = formatters["((%.4G,%.4G)--(%.4G,%.4G)--(%.4G,%.4G)--(%.4G,%.4G)--cycle)"]
+local f_vertical = formatters["((%.4G,%.4G)--(%.4G,%.4G))"]
+
+function metapost.boundingbox(d,factor)
+ local bounds = d.boundingbox
+ local factor = factor or 1
+ local llx = factor*bounds[1]
+ local lly = factor*bounds[2]
+ local urx = factor*bounds[3]
+ local ury = factor*bounds[4]
+ return f_boundingbox(llx,lly,urx,lly,urx,ury,llx,ury)
+end
+
+function metapost.widthline(d,factor)
+ local bounds = d.boundingbox
+ local factor = factor or 1
+ local lly = factor*bounds[2]
+ local ury = factor*bounds[4]
+ local width = factor*d.width
+ return f_vertical(width,lly,width,ury)
+end
+
+function metapost.zeroline(d,factor)
+ local bounds = d.boundingbox
+ local factor = factor or 1
+ local lly = factor*bounds[2]
+ local ury = factor*bounds[4]
+ return f_vertical(0,lly,0,ury)
+end
+
+function metapost.paths(d,factor)
+ local sequence = d.sequence
+ local segments = d.segments
+ local list = { }
+ local path = { } -- recycled
+ local size = 0
+ local factor = factor or 1
+ if sequence then
+ local i = 1
+ local n = #sequence
+ while i < n do
+ local operator = sequence[i]
+ if operator == "m" then -- "moveto"
+ if size > 0 then
+ size = size + 1
+ path[size] = s_cycle
+ list[#list+1] = concat(path,"",1,size)
+ size = 1
+ else
+ size = size + 1
+ end
+ path[size] = f_moveto(factor*sequence[i+1],factor*sequence[i+2])
+ i = i + 3
+ elseif operator == "l" then -- "lineto"
+ size = size + 1
+ path[size] = f_lineto(factor*sequence[i+1],factor*sequence[i+2])
+ i = i + 3
+ elseif operator == "c" then -- "curveto"
+ size = size + 1
+ path[size] = f_curveto(factor*sequence[i+1],factor*sequence[i+2],factor*sequence[i+3],factor*sequence[i+4],factor*sequence[i+5],factor*sequence[i+6])
+ i = i + 7
+ elseif operator =="q" then -- "quadraticto"
+ size = size + 1
+ -- first is always a moveto
+ local l_x, l_y = factor*sequence[i-2], factor*sequence[i-1]
+ local m_x, m_y = factor*sequence[i+1], factor*sequence[i+2]
+ local r_x, r_y = factor*sequence[i+3], factor*sequence[i+4]
+ path[size] = f_curveto (
+ l_x + 2/3 * (m_x-l_x),
+ l_y + 2/3 * (m_y-l_y),
+ r_x + 2/3 * (m_x-r_x),
+ r_y + 2/3 * (m_y-r_y),
+ r_x, r_y
+ )
+ i = i + 5
+ else
+ -- weird
+ i = i + 1
+ end
+ end
+ elseif segments then
+ for i=1,#segments do
+ local segment = segments[i]
+ local operator = segment[#segment]
+ if operator == "m" then -- "moveto"
+ if size > 0 then
+ size = size + 1
+ path[size] = s_cycle
+ list[#list+1] = concat(path,"",1,size)
+ size = 1
+ else
+ size = size + 1
+ end
+ path[size] = f_moveto(factor*segment[1],factor*segment[2])
+ elseif operator == "l" then -- "lineto"
+ size = size + 1
+ path[size] = f_lineto(factor*segment[1],factor*segment[2])
+ elseif operator == "c" then -- "curveto"
+ size = size + 1
+ path[size] = f_curveto(factor*segment[1],factor*segment[2],factor*segment[3],factor*segment[4],factor*segment[5],factor*segment[6])
+ elseif operator =="q" then -- "quadraticto"
+ size = size + 1
+ -- first is always a moveto
+ local prev = segments[i-1]
+ local l_x, l_y = factor*prev[#prev-2], factor*prev[#prev-1]
+ local m_x, m_y = factor*segment[1], factor*segment[2]
+ local r_x, r_y = factor*segment[3], factor*segment[4]
+ path[size] = f_curveto (
+ l_x + 2/3 * (m_x-l_x),
+ l_y + 2/3 * (m_y-l_y),
+ r_x + 2/3 * (m_x-r_x),
+ r_y + 2/3 * (m_y-r_y),
+ r_x, r_y
+ )
+ else
+ -- weird
+ end
+ end
+ else
+ return
+ end
+ if size > 0 then
+ size = size + 1
+ path[size] = s_cycle
+ list[#list+1] = concat(path,"",1,size)
+ end
+ return list
+end
+
+function metapost.fill(paths)
+ local r = { }
+ local n = #paths
+ for i=1,n do
+ if i < n then
+ r[i] = f_nofill(paths[i])
+ else
+ r[i] = f_dofill(paths[i])
+ end
+ end
+ return concat(r)
+end
+
+function metapost.draw(paths,trace)
+ local r = { }
+ local n = #paths
+ for i=1,n do
+ if trace then
+ r[i] = f_draw_trace(paths[i])
+ else
+ r[i] = f_draw(paths[i])
+ end
+ end
+ return concat(r)
+end
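+
+-- a minimal usage sketch (here d is assumed to be one glyph table as produced by the
+-- readers, i.e. with a boundingbox and either a sequence or segments):
+--
+-- local paths = metapost.paths(d,0.01)
+-- if paths then
+--     local code = metapost.fill(paths) -- or metapost.draw(paths,true)
+-- end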
+
+function metapost.maxbounds(data,index,factor)
+ local maxbounds = data.maxbounds
+ local factor = factor or 1
+ local glyphs = data.glyphs
+ local glyph = glyphs[index]
+ local boundingbox = glyph.boundingbox
+ local xmin, ymin, xmax, ymax
+ if not maxbounds then
+ xmin, ymin, xmax, ymax = 0, 0, 0, 0
+ for i=1,#glyphs do
+ local d = glyphs[i]
+ if d then
+ local b = d.boundingbox
+ if b then
+ if b[1] < xmin then xmin = b[1] end
+ if b[2] < ymin then ymin = b[2] end
+ if b[3] > xmax then xmax = b[3] end
+ if b[4] > ymax then ymax = b[4] end
+ end
+ end
+ end
+ maxbounds = { xmin, ymin, xmax, ymax }
+ data.maxbounds = maxbounds
+ else
+ xmin = maxbounds[1]
+ ymin = maxbounds[2]
+ xmax = maxbounds[3]
+ ymax = maxbounds[4]
+ end
+ local llx = boundingbox[1]
+ local lly = boundingbox[2]
+ local urx = boundingbox[3]
+ local ury = boundingbox[4]
+ local width = glyph.width
+ if llx > 0 then
+ llx = 0
+ end
+ if width > urx then
+ urx = width
+ end
+ return f_boundingbox(
+ factor*llx,factor*ymin,
+ factor*urx,factor*ymin,
+ factor*urx,factor*ymax,
+ factor*llx,factor*ymax
+ )
+end
+
+----- formatters = string.formatters
+----- concat = table.concat
+
+local nodecodes = nodes.nodecodes -- no nuts yet
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local penalty_code = nodecodes.penalty
+
+----- metapost = fonts.glyphs.metapost
+
+local characters = fonts.hashes.characters
+local shapes = fonts.hashes.shapes
+local topaths = fonts.metapost.paths
+
+local f_code = formatters["mfun_do_outline_text_flush(%q,%i,%.4G,%.4G)(%,t);"]
+local s_nothing = "(origin scaled 10)"
+
+local sc = 10
+local fc = number.dimenfactors.bp * sc / 10
+
+-- todo: make the next more efficient:
+
+function metapost.output(kind,font,char,advance,shift)
+ local character = characters[font][char]
+ if character then
+ local index = character.index
+ if index then
+ local shapedata = shapes[font]
+ local glyphs = shapedata.glyphs -- todo: subfonts fonts.shapes.indexed(font,sub)
+ if glyphs then
+ local glyf = glyphs[index]
+ if glyf then
+ local units = shapedata.fontheader and shapedata.fontheader.units or shapedata.units or 1000
+ local factor = sc/units
+ local shift = shift or 0
+ local advance = advance or 0
+ local paths = topaths(glyf,factor)
+ local code = f_code(kind,#paths,advance,shift,paths)
+ return code, glyf.width * factor
+ end
+ end
+ end
+ end
+ return s_nothing, 10 * sc/1000
+end
+
+-- shifted hboxes
+
+function fonts.metapost.boxtomp(n,kind)
+
+ local result = { }
+ local advance = 0
+ local distance = 0
+
+ local boxtomp
+
+ local function horizontal(current,shift,glue_sign,glue_set,glue_order)
+ while current do
+ local id = current.id
+ if id == glyph_code then
+ local code, width = metapost.output(kind,current.font,current.char,advance,-(shift or 0)* fc)
+ result[#result+1] = code
+ advance = advance + width
+ elseif id == disc_code then
+ local replace = current.replace
+ if replace then
+ horizontal(replace,shift,glue_sign,glue_set,glue_order)
+ end
+ elseif id == kern_code then
+ advance = advance + current.kern * fc
+ elseif id == glue_code then
+ local spec = current.spec
+ local width = spec.width
+ if glue_sign == 1 then
+ if spec.stretch_order == glue_order then
+ advance = advance + (width + spec.stretch * glue_set) * fc
+ else
+ advance = advance + width * fc
+ end
+ elseif glue_sign == 2 then
+ if spec.shrink_order == glue_order then
+ advance = advance + (width - spec.shrink * glue_set) * fc
+ else
+ advance = advance + width * fc
+ end
+ else
+ advance = advance + width * fc
+ end
+ elseif id == hlist_code then
+ local a = advance
+ boxtomp(current,(shift or 0)+current.shift,current.glue_sign,current.glue_set,current.glue_order)
+ advance = a + current.width * fc
+ elseif id == vlist_code then
+ boxtomp(current) -- ,distance + (shift or 0),current.glue_set*current.glue_sign)
+ else -- todo: rule
+ -- print("horizontal >>>",nodecodes[id])
+ end
+ current = current.next
+ end
+ end
+
+ local function vertical(current,shift)
+ while current do
+ local id = current.id
+ if id == hlist_code then
+ distance = distance + current.height
+ boxtomp(current,distance + (shift or 0),current.glue_set*current.glue_sign)
+ distance = distance + current.depth
+ elseif id == vlist_code then
+ print("vertical >>>",nodecodes[id])
+ elseif id == kern_code then
+ distance = distance + current.kern
+ advance = 0
+ elseif id == glue_code then
+ distance = distance + current.spec.width
+ advance = 0
+ end
+ current = current.next
+ end
+ end
+
+ boxtomp = function(list,shift)
+ local current = list.list
+ if current then
+ if list.id == hlist_code then
+ horizontal(current,shift,list.glue_sign,list.glue_set,list.glue_order)
+ else
+ vertical(current,shift)
+ end
+ end
+ end
+
+ local box = tex.box[n]
+ boxtomp(box,box.shift,box.glue_sign,box.glue_set,box.glue_order)
+ return concat(result)
+
+end
diff --git a/tex/context/base/font-off.lua b/tex/context/base/font-off.lua
new file mode 100644
index 000000000..34a4d963b
--- /dev/null
+++ b/tex/context/base/font-off.lua
@@ -0,0 +1,228 @@
+if not modules then modules = { } end modules ['font-off'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local lower = string.lower
+local round = math.round
+local setmetatableindex = table.setmetatableindex
+
+local fontloader = fontloader
+local font_to_table = fontloader.to_table
+local open_font = fontloader.open
+local get_font_info = fontloader.info
+local close_font = fontloader.close
+local font_fields = fontloader.fields
+
+-- table={
+-- ["familyname"]="TeXGyrePagella",
+-- ["fontname"]="TeXGyrePagella-Regular",
+-- ["fullname"]="TeXGyrePagella-Regular",
+-- ["italicangle"]=0,
+-- ["names"]={
+-- {
+-- ["lang"]="English (US)",
+-- ["names"]={
+-- ["copyright"]="Copyright 2006, 2009 for TeX Gyre extensions by B. Jackowski and J.M. Nowacki (on behalf of TeX users groups). This work is released under the GUST Font License -- see http://tug.org/fonts/licenses/GUST-FONT-LICENSE.txt for details.",
+-- ["family"]="TeXGyrePagella",
+-- ["fullname"]="TeXGyrePagella-Regular",
+-- ["postscriptname"]="TeXGyrePagella-Regular",
+-- ["preffamilyname"]="TeX Gyre Pagella",
+-- ["subfamily"]="Regular",
+-- ["trademark"]="Please refer to the Copyright section for the font trademark attribution notices.",
+-- ["uniqueid"]="2.004;UKWN;TeXGyrePagella-Regular",
+-- ["version"]="Version 2.004;PS 2.004;hotconv 1.0.49;makeotf.lib2.0.14853",
+-- },
+-- },
+-- },
+-- ["pfminfo"]={
+-- ["avgwidth"]=528,
+-- ["codepages"]={ 536871315, 0 },
+-- ["firstchar"]=32,
+-- ["fstype"]=12,
+-- ["hhead_ascent"]=1098,
+-- ["hhead_descent"]=-283,
+-- ["hheadascent_add"]=0,
+-- ["hheaddescent_add"]=0,
+-- ["hheadset"]=1,
+-- ["lastchar"]=64260,
+-- ["linegap"]=0,
+-- ["os2_breakchar"]=32,
+-- ["os2_capheight"]=692,
+-- ["os2_defaultchar"]=0,
+-- ["os2_family_class"]=0,
+-- ["os2_strikeypos"]=269,
+-- ["os2_strikeysize"]=50,
+-- ["os2_subxoff"]=0,
+-- ["os2_subxsize"]=650,
+-- ["os2_subyoff"]=75,
+-- ["os2_subysize"]=600,
+-- ["os2_supxoff"]=0,
+-- ["os2_supxsize"]=650,
+-- ["os2_supyoff"]=350,
+-- ["os2_supysize"]=600,
+-- ["os2_typoascent"]=726,
+-- ["os2_typodescent"]=-274,
+-- ["os2_typolinegap"]=200,
+-- ["os2_vendor"]="UKWN",
+-- ["os2_winascent"]=1098,
+-- ["os2_windescent"]=283,
+-- ["os2_xheight"]=449,
+-- ["panose"]={
+-- ["armstyle"]="Any",
+-- ["contrast"]="Any",
+-- ["familytype"]="Any",
+-- ["letterform"]="Any",
+-- ["midline"]="Any",
+-- ["proportion"]="Any",
+-- ["serifstyle"]="Any",
+-- ["strokevariation"]="Any",
+-- ["weight"]="Book",
+-- ["xheight"]="Any",
+-- },
+-- ["panose_set"]=1,
+-- ["pfmfamily"]=81,
+-- ["pfmset"]=1,
+-- ["subsuper_set"]=1,
+-- ["typoascent_add"]=0,
+-- ["typodescent_add"]=0,
+-- ["unicoderanges"]={ 536871047, 0, 0, 0 },
+-- ["vheadset"]=0,
+-- ["vlinegap"]=0,
+-- ["weight"]=400,
+-- ["width"]=5,
+-- ["winascent_add"]=0,
+-- ["windescent_add"]=0,
+-- },
+-- ["units_per_em"]=1000,
+-- ["version"]="2.004;PS 2.004;hotconv 1.0.49;makeotf.lib2.0.14853",
+-- ["weight"]="Book",
+-- }
+
+-- We had this as a temporary solution because we needed a bit more info but in the
+-- meantime it got an interesting side effect: currently luatex delays loading of e.g.
+-- glyphs, so here we first load and then discard, which is a waste. In the past it did
+-- free memory because a full load was done. One of those things that goes unnoticed.
+--
+-- local function get_full_info(...) -- check with taco what we get / could get
+-- local ff = open_font(...)
+-- if ff then
+-- local d = ff -- and font_to_table(ff)
+-- d.glyphs, d.subfonts, d.gpos, d.gsub, d.lookups = nil, nil, nil, nil, nil
+-- close_font(ff)
+-- return d
+-- else
+-- return nil, "error in loading font"
+-- end
+-- end
+
+-- Phillip suggested this faster variant but it's still a hack as fontloader.info should
+-- return these keys/values (and maybe some more) but at least we close the loader which
+-- might save some memory in the end.
+
+-- local function get_full_info(name)
+-- local ff = open_font(name)
+-- if ff then
+-- local fields = table.tohash(font_fields(ff),true) -- isn't that one stable
+-- local d = {
+-- names = fields.names and ff.names,
+-- familyname = fields.familyname and ff.familyname,
+-- fullname = fields.fullname and ff.fullname,
+-- fontname = fields.fontname and ff.fontname,
+-- weight = fields.weight and ff.weight,
+-- italicangle = fields.italicangle and ff.italicangle,
+-- units = fields.units_per_em and ff.units_per_em,
+-- designsize = fields.design_size and ff.design_size,
+-- minsize = fields.design_range_bottom and ff.design_range_bottom,
+-- maxsize = fields.design_range_top and ff.design_range_top,
+-- italicangle = fields.italicangle and ff.italicangle,
+-- pfmweight = pfminfo and pfminfo.weight or 400,
+-- pfmwidth = pfminfo and pfminfo.width or 5,
+-- }
+-- -- setmetatableindex(d,function(t,k)
+-- -- report_names("warning, trying to access field %a in font table of %a",k,name)
+-- -- end)
+-- close_font(ff)
+-- return d
+-- else
+-- return nil, "error in loading font"
+-- end
+-- end
+
+-- more efficient:
+
+local fields = nil
+
+local function check_names(names)
+ if names then
+ for i=1,#names do
+ local name = names[i]
+ if name.lang == "English (US)" then
+ return name.names
+ end
+ end
+ end
+end
+
+local function get_full_info(name)
+ local ff = open_font(name)
+ if ff then
+ -- unfortunately luatex aborts when a field is not available but we just make
+ -- sure that we only access a few known ones
+ local pfminfo = ff.pfminfo or { }
+ local names = check_names(ff.names) or { }
+ local weight = names.weight or ff.weight
+ local width = names.width -- no: ff.width
+ local d = {
+ familyname = names.preffamilyname or names.family or ff.familyname,
+ fullname = names.fullname or ff.fullname,
+ fontname = ff.fontname,
+ subfamily = names.subfamily,
+ modifiers = names.prefmodifiers,
+ weight = weight and lower(weight),
+ width = width and lower(width),
+ italicangle = round(1000*(tonumber(ff.italicangle) or 0))/1000 or 0,
+ units = ff.units_per_em,
+ designsize = ff.design_size,
+ minsize = ff.design_range_bottom,
+ maxsize = ff.design_range_top,
+ pfmweight = pfminfo.weight or 400,
+ pfmwidth = pfminfo.width or 5,
+ monospaced = pfminfo.panose and pfminfo.panose.proportion == "Monospaced",
+ }
+ close_font(ff)
+ return d
+ else
+ return nil, "error in loading font"
+ end
+end
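+
+-- a quick usage sketch (the filename is just an example):
+--
+-- local info, err = get_full_info("texgyrepagella-regular.otf")
+-- if info then
+--     print(info.familyname,info.weight,info.designsize)
+-- end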
+
+-- As we have lazy loading anyway, this one still does a full load but with less code
+-- than the previous one. However, it depends on the garbage collector kicking in, and
+-- in the current version that somehow happens not that often (on my machine I end up
+-- with some 3 GB extra before that happens).
+
+-- local function get_full_info(...)
+-- local ff = open_font(...)
+-- if ff then
+-- local d = { } -- ff is userdata so [1] or # fails on it
+-- setmetatableindex(d,ff)
+-- return d -- garbage collection will do the close_font(ff)
+-- else
+-- return nil, "error in loading font"
+-- end
+-- end
+
+fonts = fonts or { }
+local handlers = fonts.handlers or { }
+fonts.handlers = handlers
+local otf = handlers.otf or { }
+handlers.otf = otf
+local readers = otf.readers or { }
+otf.readers = readers
+
+fontloader.fullinfo = get_full_info
+readers.getinfo = readers.getinfo or get_full_info
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index db0118d84..03463fdaa 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -60,7 +60,7 @@ local otf = fonts.handlers.otf
otf.glists = { "gsub", "gpos" }
-otf.version = 2.814 -- beware: also sync font-mis.lua
+otf.version = 2.815 -- beware: also sync font-mis.lua
otf.cache = containers.define("fonts", "otf", otf.version, true)
local hashes = fonts.hashes
@@ -2394,10 +2394,14 @@ local function copytotfm(data,cache_id)
local spaceunits = 500
local spacer = "space"
local designsize = metadata.designsize or metadata.design_size or 100
+ local minsize = metadata.minsize or metadata.design_range_bottom or designsize
+ local maxsize = metadata.maxsize or metadata.design_range_top or designsize
local mathspecs = metadata.math
--
if designsize == 0 then
designsize = 100
+ minsize = 100
+ maxsize = 100
end
if mathspecs then
for name, value in next, mathspecs do
@@ -2474,15 +2478,15 @@ local function copytotfm(data,cache_id)
local fontname = metadata.fontname
local fullname = metadata.fullname or fontname
local psname = fontname or fullname
- local units = metadata.units_per_em or 1000
+ local units = metadata.units or metadata.units_per_em or 1000
--
if units == 0 then -- catch bugs in fonts
units = 1000 -- maybe 2000 when ttf
- metadata.units_per_em = 1000
+ metadata.units = 1000
report_otf("changing %a units to %a",0,units)
end
--
- local monospaced = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced")
+ local monospaced = metadata.monospaced or metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced")
local charwidth = pfminfo.avgwidth -- or unset
local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight
-- charwidth = charwidth * units/1000
@@ -2552,17 +2556,16 @@ local function copytotfm(data,cache_id)
end
end
--
- parameters.designsize = (designsize/10)*65536
- parameters.ascender = abs(metadata.ascent or 0)
- parameters.descender = abs(metadata.descent or 0)
- parameters.units = units
+ parameters.designsize = (designsize/10)*65536
+ parameters.minsize = (minsize /10)*65536
+ parameters.maxsize = (maxsize /10)*65536
+ parameters.ascender = abs(metadata.ascender or metadata.ascent or 0)
+ parameters.descender = abs(metadata.descender or metadata.descent or 0)
+ parameters.units = units
--
properties.space = spacer
properties.encodingbytes = 2
properties.format = data.format or otf_format(filename) or formats.otf
--- if units ~= 1000 and format ~= "truetype" then
--- properties.format = "truetype"
--- end
properties.noglyphnames = true
properties.filename = filename
properties.fontname = fontname
diff --git a/tex/context/base/font-otr.lua b/tex/context/base/font-otr.lua
new file mode 100644
index 000000000..a83766f85
--- /dev/null
+++ b/tex/context/base/font-otr.lua
@@ -0,0 +1,1816 @@
+if not modules then modules = { } end modules ['font-otr'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this code is not yet ready for generic use, i.e. i want to be free to change the
+-- keys and values
+
+-- we can optimize kern pairs (i.e. simple h only positioning) later if we want
+-- which is easier as then we know if we have clashes between features
+--
+-- When looking into a cid font related issue in the ff library I wondered if
+-- it made sense to use Lua to filter the information from the otf and ttf
+-- files. Quite some ff code relates to special fonts and in practice we only
+-- use rather normal opentype fonts.
+--
+-- The code here is based on the documentation (and examples) at the microsoft
+-- website. The code will be extended and improved stepwise. We generate a table
+-- that is comparable with the one luatex creates but we can also create one for
+-- context directly.
+--
+-- todo: add checks for versions
+-- todo: check all unsigned / signed
+-- todo: save mode for context font loader (also deal with unicode dups)
+--
+-- widths and weights are kind of messy: for instance lmmonolt has a pfmweight of
+-- 400 while it should be 300
+--
+-- we can have a bit more in the info data if needed as it will not really slow
+-- down identifying
+--
+-- the main loader is not yet meant for production use (work in progress on the dsp
+-- file) but as soon as we're done i will also adapt that table (as there is no need
+-- to be completely ff compatible)
+
+if not characters then
+ require("char-def")
+ require("char-ini")
+end
+
+local next, type, unpack = next, type, unpack
+local byte, lower, char = string.byte, string.lower, string.char
+local bittest = bit32.btest
+local concat, remove = table.concat, table.remove
+local floor, mod, abs, sqrt, round = math.floor, math.mod, math.abs, math.sqrt, math.round
+local P, C, R, S, C, Cs, Cc, Ct, Carg, Cmt = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Carg, lpeg.Cmt
+local lpegmatch = lpeg.match
+
+local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters
+local sortedkeys = table.sortedkeys
+local sortedhash = table.sortedhash
+local stripstring = string.strip
+local utf16_to_utf8_be = utf.utf16_to_utf8_be
+
+local report = logs.reporter("otf reader")
+
+fonts = fonts or { }
+local handlers = fonts.handlers or { }
+fonts.handlers = handlers
+local otf = handlers.otf or { }
+handlers.otf = otf
+local readers = otf.readers or { }
+otf.readers = readers
+
+local files = utilities.files
+
+local readbytes = files.readbytes
+local readstring = files.readstring
+local readbyte = files.readcardinal1 -- 8-bit unsigned integer
+local readushort = files.readcardinal2 -- 16-bit unsigned integer
+local readuint = files.readcardinal3 -- 24-bit unsigned integer
+local readulong = files.readcardinal4 -- 32-bit unsigned integer
+local readchar = files.readinteger1 -- 8-bit signed integer
+local readshort = files.readinteger2 -- 16-bit signed integer
+local readlong = files.readinteger4 -- 32-bit signed integer
+local readfixed = files.readfixed4
+local readfword = readshort -- 16-bit signed integer that describes a quantity in FUnits
+local readufword = readushort -- 16-bit unsigned integer that describes a quantity in FUnits
+local readoffset = readushort
+local read2dot14 = files.read2dot14 -- 16-bit signed fixed number with the low 14 bits of fraction (2.14) (F2DOT14)
+
+local readtag = function(f) return f:read(4) end
+local skipshort = function(f,n) f:read(n and 2*n or 2) end
+
+local reportedskipped = { }
+
+local function reportskippedtable(tag)
+ if not reportedskipped[tag] then
+ report("loading of table %a skipped (reported once only)",tag)
+ reportedskipped[tag] = true
+ end
+end
+-- a date is represented as the number of seconds since 12:00 midnight, January 1, 1904,
+-- stored as a signed 64-bit integer
+
+local function readlongdatetime(f)
+ local a, b, c, d, e, f, g, h = byte(f:read(8),1,8)
+ return 0x100000000 * d + 0x1000000 * e + 0x10000 * f + 0x100 * g + h
+end
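+
+-- note that only the low five bytes (d..h) are combined above; the three highest bytes
+-- are ignored, which is fine for realistic font creation and modification dates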
+
+-- We have quite some data tables. We are somewhat ff compatible with names but as I
+-- used the information from the microsoft site there can be differences. Eventually I
+-- might end up with a different ordering and naming.
+
+local reservednames = { [0] =
+ "copyright",
+ "family",
+ "subfamily",
+ "uniqueid",
+ "fullname",
+ "version",
+ "postscriptname",
+ "trademark",
+ "manufacturer",
+ "designer",
+ "description", -- descriptor in ff
+ "venderurl",
+ "designerurl",
+ "license",
+ "licenseurl",
+ "reserved",
+ "typographicfamily", -- preffamilyname
+ "typographicsubfamily", -- prefmodifiers
+ "compatiblefullname", -- for mac
+ "sampletext",
+ "cidfindfontname",
+ "wwsfamily",
+ "wwssubfamily",
+ "lightbackgroundpalette",
+ "darkbackgroundpalette",
+}
+
+-- more at: https://www.microsoft.com/typography/otspec/name.htm
+
+-- setmetatableindex(reservednames,function(t,k)
+-- local v = "name_" .. k
+-- t[k] = v
+-- return v
+-- end)
+
+local platforms = { [0] =
+ "unicode",
+ "macintosh",
+ "iso",
+ "windows",
+ "custom",
+}
+
+local encodings = {
+ unicode = { [0] =
+ "unicode 1.0 semantics",
+ "unicode 1.1 semantics",
+ "iso/iec 10646",
+ "unicode 2.0 bmp", -- cmap subtable formats 0, 4, 6
+ "unicode 2.0 full", -- cmap subtable formats 0, 4, 6, 10, 12
+ "unicode variation sequences", -- cmap subtable format 14).
+ "unicode full repertoire", -- cmap subtable formats 0, 4, 6, 10, 12, 13
+ },
+ macintosh = { [0] =
+ "roman", "japanese", "chinese (traditional)", "korean", "arabic", "hebrew", "greek", "russian",
+ "rsymbol", "devanagari", "gurmukhi", "gujarati", "oriya", "bengali", "tamil", "telugu", "kannada",
+ "malayalam", "sinhalese", "burmese", "khmer", "thai", "laotian", "georgian", "armenian",
+ "chinese (simplified)", "tibetan", "mongolian", "geez", "slavic", "vietnamese", "sindhi",
+ "uninterpreted",
+ },
+ iso = { [0] =
+ "7-bit ascii",
+ "iso 10646",
+ "iso 8859-1",
+ },
+ windows = { [0] =
+ "symbol",
+ "unicode bmp", -- this is utf16
+ "shiftjis",
+ "prc",
+ "big5",
+ "wansung",
+ "johab",
+ "reserved 7",
+ "reserved 8",
+ "reserved 9",
+ "unicode ucs-4",
+ },
+ custom = {
+ --custom: 0-255 : otf windows nt compatibility mapping
+ }
+}
+
+local decoders = {
+ unicode = { },
+ macintosh = { },
+ iso = { },
+ windows = {
+ ["unicode bmp"] = utf16_to_utf8_be
+ },
+ custom = { },
+}
+
+-- This is a bit over the top as we can just look for either windows, unicode or macintosh
+-- names (in that order). A font with no english name is probably a weird one anyway.
+
+local languages = {
+ unicode = {
+ [ 0] = "english",
+ },
+ macintosh = {
+ [ 0] = "english",
+ [ 1] = "french",
+ [ 2] = "german",
+ [ 3] = "italian",
+ [ 4] = "dutch",
+ [ 5] = "swedish",
+ [ 6] = "spanish",
+ [ 7] = "danish",
+ [ 8] = "portuguese",
+ [ 9] = "norwegian",
+ [ 10] = "hebrew",
+ [ 11] = "japanese",
+ [ 12] = "arabic",
+ [ 13] = "finnish",
+ [ 14] = "greek",
+ [ 15] = "icelandic",
+ [ 16] = "maltese",
+ [ 17] = "turkish",
+ [ 18] = "croatian",
+ [ 19] = "chinese (traditional)",
+ [ 20] = "urdu",
+ [ 21] = "hindi",
+ [ 22] = "thai",
+ [ 23] = "korean",
+ [ 24] = "lithuanian",
+ [ 25] = "polish",
+ [ 26] = "hungarian",
+ [ 27] = "estonian",
+ [ 28] = "latvian",
+ [ 29] = "sami",
+ [ 30] = "faroese",
+ [ 31] = "farsi/persian",
+ [ 32] = "russian",
+ [ 33] = "chinese (simplified)",
+ [ 34] = "flemish",
+ [ 35] = "irish gaelic",
+ [ 36] = "albanian",
+ [ 37] = "romanian",
+ [ 38] = "czech",
+ [ 39] = "slovak",
+ [ 40] = "slovenian",
+ [ 41] = "yiddish",
+ [ 42] = "serbian",
+ [ 43] = "macedonian",
+ [ 44] = "bulgarian",
+ [ 45] = "ukrainian",
+ [ 46] = "byelorussian",
+ [ 47] = "uzbek",
+ [ 48] = "kazakh",
+ [ 49] = "azerbaijani (cyrillic script)",
+ [ 50] = "azerbaijani (arabic script)",
+ [ 51] = "armenian",
+ [ 52] = "georgian",
+ [ 53] = "moldavian",
+ [ 54] = "kirghiz",
+ [ 55] = "tajiki",
+ [ 56] = "turkmen",
+ [ 57] = "mongolian (mongolian script)",
+ [ 58] = "mongolian (cyrillic script)",
+ [ 59] = "pashto",
+ [ 60] = "kurdish",
+ [ 61] = "kashmiri",
+ [ 62] = "sindhi",
+ [ 63] = "tibetan",
+ [ 64] = "nepali",
+ [ 65] = "sanskrit",
+ [ 66] = "marathi",
+ [ 67] = "bengali",
+ [ 68] = "assamese",
+ [ 69] = "gujarati",
+ [ 70] = "punjabi",
+ [ 71] = "oriya",
+ [ 72] = "malayalam",
+ [ 73] = "kannada",
+ [ 74] = "tamil",
+ [ 75] = "telugu",
+ [ 76] = "sinhalese",
+ [ 77] = "burmese",
+ [ 78] = "khmer",
+ [ 79] = "lao",
+ [ 80] = "vietnamese",
+ [ 81] = "indonesian",
+ [ 82] = "tagalog",
+ [ 83] = "malay (roman script)",
+ [ 84] = "malay (arabic script)",
+ [ 85] = "amharic",
+ [ 86] = "tigrinya",
+ [ 87] = "galla",
+ [ 88] = "somali",
+ [ 89] = "swahili",
+ [ 90] = "kinyarwanda/ruanda",
+ [ 91] = "rundi",
+ [ 92] = "nyanja/chewa",
+ [ 93] = "malagasy",
+ [ 94] = "esperanto",
+ [128] = "welsh",
+ [129] = "basque",
+ [130] = "catalan",
+ [131] = "latin",
+ [132] = "quechua",
+ [133] = "guarani",
+ [134] = "aymara",
+ [135] = "tatar",
+ [136] = "uighur",
+ [137] = "dzongkha",
+ [138] = "javanese (roman script)",
+ [139] = "sundanese (roman script)",
+ [140] = "galician",
+ [141] = "afrikaans",
+ [142] = "breton",
+ [143] = "inuktitut",
+ [144] = "scottish gaelic",
+ [145] = "manx gaelic",
+ [146] = "irish gaelic (with dot above)",
+ [147] = "tongan",
+ [148] = "greek (polytonic)",
+ [149] = "greenlandic",
+ [150] = "azerbaijani (roman script)",
+ },
+ iso = {
+ },
+ windows = {
+ [0x0436] = "afrikaans - south africa",
+ [0x041c] = "albanian - albania",
+ [0x0484] = "alsatian - france",
+ [0x045e] = "amharic - ethiopia",
+ [0x1401] = "arabic - algeria",
+ [0x3c01] = "arabic - bahrain",
+ [0x0c01] = "arabic - egypt",
+ [0x0801] = "arabic - iraq",
+ [0x2c01] = "arabic - jordan",
+ [0x3401] = "arabic - kuwait",
+ [0x3001] = "arabic - lebanon",
+ [0x1001] = "arabic - libya",
+ [0x1801] = "arabic - morocco",
+ [0x2001] = "arabic - oman",
+ [0x4001] = "arabic - qatar",
+ [0x0401] = "arabic - saudi arabia",
+ [0x2801] = "arabic - syria",
+ [0x1c01] = "arabic - tunisia",
+ [0x3801] = "arabic - u.a.e.",
+ [0x2401] = "arabic - yemen",
+ [0x042b] = "armenian - armenia",
+ [0x044d] = "assamese - india",
+ [0x082c] = "azeri (cyrillic) - azerbaijan",
+ [0x042c] = "azeri (latin) - azerbaijan",
+ [0x046d] = "bashkir - russia",
+ [0x042d] = "basque - basque",
+ [0x0423] = "belarusian - belarus",
+ [0x0845] = "bengali - bangladesh",
+ [0x0445] = "bengali - india",
+ [0x201a] = "bosnian (cyrillic) - bosnia and herzegovina",
+ [0x141a] = "bosnian (latin) - bosnia and herzegovina",
+ [0x047e] = "breton - france",
+ [0x0402] = "bulgarian - bulgaria",
+ [0x0403] = "catalan - catalan",
+ [0x0c04] = "chinese - hong kong s.a.r.",
+ [0x1404] = "chinese - macao s.a.r.",
+ [0x0804] = "chinese - people's republic of china",
+ [0x1004] = "chinese - singapore",
+ [0x0404] = "chinese - taiwan",
+ [0x0483] = "corsican - france",
+ [0x041a] = "croatian - croatia",
+ [0x101a] = "croatian (latin) - bosnia and herzegovina",
+ [0x0405] = "czech - czech republic",
+ [0x0406] = "danish - denmark",
+ [0x048c] = "dari - afghanistan",
+ [0x0465] = "divehi - maldives",
+ [0x0813] = "dutch - belgium",
+ [0x0413] = "dutch - netherlands",
+ [0x0c09] = "english - australia",
+ [0x2809] = "english - belize",
+ [0x1009] = "english - canada",
+ [0x2409] = "english - caribbean",
+ [0x4009] = "english - india",
+ [0x1809] = "english - ireland",
+ [0x2009] = "english - jamaica",
+ [0x4409] = "english - malaysia",
+ [0x1409] = "english - new zealand",
+ [0x3409] = "english - republic of the philippines",
+ [0x4809] = "english - singapore",
+ [0x1c09] = "english - south africa",
+ [0x2c09] = "english - trinidad and tobago",
+ [0x0809] = "english - united kingdom",
+ [0x0409] = "english - united states",
+ [0x3009] = "english - zimbabwe",
+ [0x0425] = "estonian - estonia",
+ [0x0438] = "faroese - faroe islands",
+ [0x0464] = "filipino - philippines",
+ [0x040b] = "finnish - finland",
+ [0x080c] = "french - belgium",
+ [0x0c0c] = "french - canada",
+ [0x040c] = "french - france",
+ [0x140c] = "french - luxembourg",
+ [0x180c] = "french - principality of monaco",
+ [0x100c] = "french - switzerland",
+ [0x0462] = "frisian - netherlands",
+ [0x0456] = "galician - galician",
+ [0x0437] = "georgian - georgia",
+ [0x0c07] = "german - austria",
+ [0x0407] = "german - germany",
+ [0x1407] = "german - liechtenstein",
+ [0x1007] = "german - luxembourg",
+ [0x0807] = "german - switzerland",
+ [0x0408] = "greek - greece",
+ [0x046f] = "greenlandic - greenland",
+ [0x0447] = "gujarati - india",
+ [0x0468] = "hausa (latin) - nigeria",
+ [0x040d] = "hebrew - israel",
+ [0x0439] = "hindi - india",
+ [0x040e] = "hungarian - hungary",
+ [0x040f] = "icelandic - iceland",
+ [0x0470] = "igbo - nigeria",
+ [0x0421] = "indonesian - indonesia",
+ [0x045d] = "inuktitut - canada",
+ [0x085d] = "inuktitut (latin) - canada",
+ [0x083c] = "irish - ireland",
+ [0x0434] = "isixhosa - south africa",
+ [0x0435] = "isizulu - south africa",
+ [0x0410] = "italian - italy",
+ [0x0810] = "italian - switzerland",
+ [0x0411] = "japanese - japan",
+ [0x044b] = "kannada - india",
+ [0x043f] = "kazakh - kazakhstan",
+ [0x0453] = "khmer - cambodia",
+ [0x0486] = "k'iche - guatemala",
+ [0x0487] = "kinyarwanda - rwanda",
+ [0x0441] = "kiswahili - kenya",
+ [0x0457] = "konkani - india",
+ [0x0412] = "korean - korea",
+ [0x0440] = "kyrgyz - kyrgyzstan",
+ [0x0454] = "lao - lao p.d.r.",
+ [0x0426] = "latvian - latvia",
+ [0x0427] = "lithuanian - lithuania",
+ [0x082e] = "lower sorbian - germany",
+ [0x046e] = "luxembourgish - luxembourg",
+ [0x042f] = "macedonian (fyrom) - former yugoslav republic of macedonia",
+ [0x083e] = "malay - brunei darussalam",
+ [0x043e] = "malay - malaysia",
+ [0x044c] = "malayalam - india",
+ [0x043a] = "maltese - malta",
+ [0x0481] = "maori - new zealand",
+ [0x047a] = "mapudungun - chile",
+ [0x044e] = "marathi - india",
+ [0x047c] = "mohawk - mohawk",
+ [0x0450] = "mongolian (cyrillic) - mongolia",
+ [0x0850] = "mongolian (traditional) - people's republic of china",
+ [0x0461] = "nepali - nepal",
+ [0x0414] = "norwegian (bokmal) - norway",
+ [0x0814] = "norwegian (nynorsk) - norway",
+ [0x0482] = "occitan - france",
+ [0x0448] = "odia (formerly oriya) - india",
+ [0x0463] = "pashto - afghanistan",
+ [0x0415] = "polish - poland",
+ [0x0416] = "portuguese - brazil",
+ [0x0816] = "portuguese - portugal",
+ [0x0446] = "punjabi - india",
+ [0x046b] = "quechua - bolivia",
+ [0x086b] = "quechua - ecuador",
+ [0x0c6b] = "quechua - peru",
+ [0x0418] = "romanian - romania",
+ [0x0417] = "romansh - switzerland",
+ [0x0419] = "russian - russia",
+ [0x243b] = "sami (inari) - finland",
+ [0x103b] = "sami (lule) - norway",
+ [0x143b] = "sami (lule) - sweden",
+ [0x0c3b] = "sami (northern) - finland",
+ [0x043b] = "sami (northern) - norway",
+ [0x083b] = "sami (northern) - sweden",
+ [0x203b] = "sami (skolt) - finland",
+ [0x183b] = "sami (southern) - norway",
+ [0x1c3b] = "sami (southern) - sweden",
+ [0x044f] = "sanskrit - india",
+ [0x1c1a] = "serbian (cyrillic) - bosnia and herzegovina",
+ [0x0c1a] = "serbian (cyrillic) - serbia",
+ [0x181a] = "serbian (latin) - bosnia and herzegovina",
+ [0x081a] = "serbian (latin) - serbia",
+ [0x046c] = "sesotho sa leboa - south africa",
+ [0x0432] = "setswana - south africa",
+ [0x045b] = "sinhala - sri lanka",
+ [0x041b] = "slovak - slovakia",
+ [0x0424] = "slovenian - slovenia",
+ [0x2c0a] = "spanish - argentina",
+ [0x400a] = "spanish - bolivia",
+ [0x340a] = "spanish - chile",
+ [0x240a] = "spanish - colombia",
+ [0x140a] = "spanish - costa rica",
+ [0x1c0a] = "spanish - dominican republic",
+ [0x300a] = "spanish - ecuador",
+ [0x440a] = "spanish - el salvador",
+ [0x100a] = "spanish - guatemala",
+ [0x480a] = "spanish - honduras",
+ [0x080a] = "spanish - mexico",
+ [0x4c0a] = "spanish - nicaragua",
+ [0x180a] = "spanish - panama",
+ [0x3c0a] = "spanish - paraguay",
+ [0x280a] = "spanish - peru",
+ [0x500a] = "spanish - puerto rico",
+ [0x0c0a] = "spanish (modern sort) - spain",
+ [0x040a] = "spanish (traditional sort) - spain",
+ [0x540a] = "spanish - united states",
+ [0x380a] = "spanish - uruguay",
+ [0x200a] = "spanish - venezuela",
+ [0x081d] = "swedish - finland",
+ [0x041d] = "swedish - sweden",
+ [0x045a] = "syriac - syria",
+ [0x0428] = "tajik (cyrillic) - tajikistan",
+ [0x085f] = "tamazight (latin) - algeria",
+ [0x0449] = "tamil - india",
+ [0x0444] = "tatar - russia",
+ [0x044a] = "telugu - india",
+ [0x041e] = "thai - thailand",
+ [0x0451] = "tibetan - prc",
+ [0x041f] = "turkish - turkey",
+ [0x0442] = "turkmen - turkmenistan",
+ [0x0480] = "uighur - prc",
+ [0x0422] = "ukrainian - ukraine",
+ [0x042e] = "upper sorbian - germany",
+ [0x0420] = "urdu - islamic republic of pakistan",
+ [0x0843] = "uzbek (cyrillic) - uzbekistan",
+ [0x0443] = "uzbek (latin) - uzbekistan",
+ [0x042a] = "vietnamese - vietnam",
+ [0x0452] = "welsh - united kingdom",
+ [0x0488] = "wolof - senegal",
+ [0x0485] = "yakut - russia",
+ [0x0478] = "yi - prc",
+ [0x046a] = "yoruba - nigeria",
+ },
+ custom = {
+ },
+}
+
+local standardromanencoding = { [0] = -- hijacked from wikipedia
+ "notdef", ".null", "nonmarkingreturn", "space", "exclam", "quotedbl",
+ "numbersign", "dollar", "percent", "ampersand", "quotesingle", "parenleft",
+ "parenright", "asterisk", "plus", "comma", "hyphen", "period", "slash",
+ "zero", "one", "two", "three", "four", "five", "six", "seven", "eight",
+ "nine", "colon", "semicolon", "less", "equal", "greater", "question", "at",
+ "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O",
+ "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "bracketleft",
+ "backslash", "bracketright", "asciicircum", "underscore", "grave", "a", "b",
+ "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q",
+ "r", "s", "t", "u", "v", "w", "x", "y", "z", "braceleft", "bar",
+ "braceright", "asciitilde", "Adieresis", "Aring", "Ccedilla", "Eacute",
+ "Ntilde", "Odieresis", "Udieresis", "aacute", "agrave", "acircumflex",
+ "adieresis", "atilde", "aring", "ccedilla", "eacute", "egrave",
+ "ecircumflex", "edieresis", "iacute", "igrave", "icircumflex", "idieresis",
+ "ntilde", "oacute", "ograve", "ocircumflex", "odieresis", "otilde", "uacute",
+ "ugrave", "ucircumflex", "udieresis", "dagger", "degree", "cent", "sterling",
+ "section", "bullet", "paragraph", "germandbls", "registered", "copyright",
+ "trademark", "acute", "dieresis", "notequal", "AE", "Oslash", "infinity",
+ "plusminus", "lessequal", "greaterequal", "yen", "mu", "partialdiff",
+ "summation", "product", "pi", "integral", "ordfeminine", "ordmasculine",
+ "Omega", "ae", "oslash", "questiondown", "exclamdown", "logicalnot",
+ "radical", "florin", "approxequal", "Delta", "guillemotleft",
+ "guillemotright", "ellipsis", "nonbreakingspace", "Agrave", "Atilde",
+ "Otilde", "OE", "oe", "endash", "emdash", "quotedblleft", "quotedblright",
+ "quoteleft", "quoteright", "divide", "lozenge", "ydieresis", "Ydieresis",
+ "fraction", "currency", "guilsinglleft", "guilsinglright", "fi", "fl",
+ "daggerdbl", "periodcentered", "quotesinglbase", "quotedblbase",
+ "perthousand", "Acircumflex", "Ecircumflex", "Aacute", "Edieresis", "Egrave",
+ "Iacute", "Icircumflex", "Idieresis", "Igrave", "Oacute", "Ocircumflex",
+ "apple", "Ograve", "Uacute", "Ucircumflex", "Ugrave", "dotlessi",
+ "circumflex", "tilde", "macron", "breve", "dotaccent", "ring", "cedilla",
+ "hungarumlaut", "ogonek", "caron", "Lslash", "lslash", "Scaron", "scaron",
+ "Zcaron", "zcaron", "brokenbar", "Eth", "eth", "Yacute", "yacute", "Thorn",
+ "thorn", "minus", "multiply", "onesuperior", "twosuperior", "threesuperior",
+ "onehalf", "onequarter", "threequarters", "franc", "Gbreve", "gbreve",
+ "Idotaccent", "Scedilla", "scedilla", "Cacute", "cacute", "Ccaron", "ccaron",
+ "dcroat",
+}
+
+local weights = {
+ [100] = "thin",
+ [200] = "extralight",
+ [300] = "light",
+ [400] = "normal",
+ [500] = "medium",
+ [600] = "semibold",
+ [700] = "bold",
+ [800] = "extrabold",
+ [900] = "black",
+}
+
+local widths = {
+ [1] = "ultracondensed",
+ [2] = "extracondensed",
+ [3] = "condensed",
+ [4] = "semicondensed",
+ [5] = "normal",
+ [6] = "semiexpanded",
+ [7] = "expanded",
+ [8] = "extraexpanded",
+ [9] = "ultraexpanded",
+}
+
+setmetatableindex(weights, function(t,k)
+ local r = floor((k + 50) / 100) * 100
+ local v = (r > 900 and "black") or rawget(t,r) or "normal"
+-- print("weight:",k,r,v)
+ return v
+end)
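+
+-- so, for instance, a weight class of 520 rounds to 500 and maps onto "medium", while
+-- anything rounding above 900 falls back to "black"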
+
+setmetatableindex(widths,function(t,k)
+-- print("width:",k)
+ return "normal"
+end)
+
+local panoseweights = {
+ [ 0] = "normal",
+ [ 1] = "normal",
+ [ 2] = "verylight",
+ [ 3] = "light",
+ [ 4] = "thin",
+ [ 5] = "book",
+ [ 6] = "medium",
+ [ 7] = "demi",
+ [ 8] = "bold",
+ [ 9] = "heavy",
+ [10] = "black",
+}
+
+local panosewidths = {
+ [ 0] = "normal",
+ [ 1] = "normal",
+ [ 2] = "normal",
+ [ 3] = "normal",
+ [ 4] = "normal",
+ [ 5] = "expanded",
+ [ 6] = "condensed",
+ [ 7] = "veryexpanded",
+ [ 8] = "verycondensed",
+ [ 9] = "monospaced",
+}
+
+-- We implement a reader per table.
+
+-- The name table is probably the first one to load. After all this one provides
+-- useful information about what we deal with. The complication is that we need
+-- to filter the best one available.
+
+function readers.name(f,fontdata)
+ local datatable = fontdata.tables.name
+ if datatable then
+ f:seek("set",datatable.offset)
+ local format = readushort(f)
+ local nofnames = readushort(f)
+ local offset = readushort(f)
+ -- we can also provide a raw list as extra, todo as option
+ local namelists = {
+ unicode = { },
+ windows = { },
+ macintosh = { },
+ -- iso = { },
+ -- windows = { },
+ }
+ for i=1,nofnames do
+ local platform = platforms[readushort(f)]
+ if platform then
+ local namelist = namelists[platform]
+ if namelist then
+ local encoding = readushort(f)
+ local language = readushort(f)
+ local encodings = encodings[platform]
+ local languages = languages[platform]
+ if encodings and languages then
+ local encoding = encodings[encoding]
+ local language = languages[language]
+ if encoding and language then
+ local name = reservednames[readushort(f)]
+ if name then
+ namelist[#namelist+1] = {
+ platform = platform,
+ encoding = encoding,
+ language = language,
+ name = name,
+ length = readushort(f),
+ offset = readushort(f),
+ }
+ else
+ skipshort(f,2)
+ end
+ else
+ skipshort(f,3)
+ end
+ else
+ skipshort(f,3)
+ end
+ else
+ skipshort(f,5)
+ end
+ else
+ skipshort(f,5)
+ end
+ end
+ -- if format == 1 then
+ -- local noftags = readushort(f)
+ -- for i=1,noftags do
+ -- local length = readushort(f)
+ -- local offset = readushort(f)
+ -- end
+ -- end
+ --
+ -- we need to choose one we like, for instance a unicode one
+ --
+ local start = datatable.offset + offset
+ local names = { }
+ local done = { }
+ --
+ -- there is quite some logic in ff ... hard to follow so we start simple
+ -- and extend when we run into it (todo: proper reverse hash) .. we're only
+ -- interested in english anyway
+ --
+ local function filter(platform,e,l)
+ local namelist = namelists[platform]
+ for i=1,#namelist do
+ local name = namelist[i]
+ local nametag = name.name
+ if not done[nametag] then
+ local encoding = name.encoding
+ local language = name.language
+ if (not e or encoding == e) and (not l or language == l) then
+ f:seek("set",start+name.offset)
+ local content = readstring(f,name.length)
+ local decoder = decoders[platform]
+ if decoder then
+ decoder = decoder[encoding]
+ end
+ if decoder then
+ content = decoder(content)
+ end
+ names[nametag] = {
+ content = content,
+ platform = platform,
+ encoding = encoding,
+ language = language,
+ }
+ done[nametag] = true
+ end
+ end
+ end
+ end
+ --
+ filter("windows","unicode bmp","english - united states")
+ -- filter("unicode") -- which one ?
+ filter("macintosh","roman","english")
+ filter("windows")
+ filter("macintosh")
+ filter("unicode")
+ --
+ fontdata.names = names
+ else
+ fontdata.names = { }
+ end
+end
+
+-- This table is an original windows (with its precursor os/2) table. In ff this one is
+-- part of the pfminfo table but here we keep it separate (for now). We will create a
+-- properties table afterwards.
+
+readers["os/2"] = function(f,fontdata)
+ local datatable = fontdata.tables["os/2"]
+ if datatable then
+ f:seek("set",datatable.offset)
+ local version = readushort(f)
+ local windowsmetrics = {
+ version = version,
+ averagewidth = readushort(f),
+ weightclass = readushort(f),
+ widthclass = readushort(f),
+ fstype = readushort(f),
+ subscriptxsize = readushort(f),
+ subscriptysize = readushort(f),
+ subscriptxoffset = readushort(f),
+ subscriptyoffset = readushort(f),
+ superscriptxsize = readushort(f),
+ superscriptysize = readushort(f),
+ superscriptxoffset = readushort(f),
+ superscriptyoffset = readushort(f),
+ strikeoutsize = readushort(f),
+ strikeoutpos = readushort(f),
+ familyclass = readushort(f),
+ panose = { readbytes(f,10) },
+ unicoderanges = { readulong(f), readulong(f), readulong(f), readulong(f) },
+ vendor = readstring(f,4),
+ fsselection = readushort(f),
+ firstcharindex = readushort(f),
+ lastcharindex = readushort(f),
+ typoascender = readshort(f),
+ typodescender = readshort(f),
+ typolinegap = readshort(f),
+ winascent = readushort(f),
+ windescent = readushort(f),
+ }
+ if version >= 1 then
+ windowsmetrics.codepageranges = { readulong(f), readulong(f) }
+ end
+ if version >= 3 then
+ windowsmetrics.xheight = readshort(f)
+ windowsmetrics.capheight = readshort(f)
+ windowsmetrics.defaultchar = readushort(f)
+ windowsmetrics.breakchar = readushort(f)
+ -- windowsmetrics.maxcontexts = readushort(f)
+ -- windowsmetrics.loweropticalpointsize = readushort(f)
+ -- windowsmetrics.upperopticalpointsize = readushort(f)
+ end
+ --
+ -- todo: unicoderanges
+ --
+ windowsmetrics.weight = windowsmetrics.weightclass and weights[windowsmetrics.weightclass]
+ windowsmetrics.width = windowsmetrics.widthclass and widths [windowsmetrics.widthclass]
+ --
+ windowsmetrics.panoseweight = panoseweights[windowsmetrics.panose[3]]
+ windowsmetrics.panosewidth = panosewidths [windowsmetrics.panose[4]]
+ --
+ fontdata.windowsmetrics = windowsmetrics
+ else
+ fontdata.windowsmetrics = { }
+ end
+end
+
+readers.head = function(f,fontdata)
+ local datatable = fontdata.tables.head
+ if datatable then
+ f:seek("set",datatable.offset)
+ local fontheader = {
+ version = readfixed(f),
+ revision = readfixed(f),
+ checksum = readulong(f),
+ magic = readulong(f),
+ flags = readushort(f),
+ units = readushort(f),
+ created = readlongdatetime(f),
+ modified = readlongdatetime(f),
+ xmin = readshort(f),
+ ymin = readshort(f),
+ xmax = readshort(f),
+ ymax = readshort(f),
+ macstyle = readushort(f),
+ smallpixels = readushort(f),
+ directionhint = readshort(f),
+ indextolocformat = readshort(f),
+ glyphformat = readshort(f),
+ }
+ fontdata.fontheader = fontheader
+ fontdata.nofglyphs = 0
+ else
+ fontdata.fontheader = { }
+ fontdata.nofglyphs = 0
+ end
+end
+
+-- This table is a rather simple one. No treatment of values is needed here. Most
+-- variables are not used but nofhmetrics is quite important.
+
+readers.hhea = function(f,fontdata,specification)
+ if specification.details then
+ local datatable = fontdata.tables.hhea
+ if datatable then
+ f:seek("set",datatable.offset)
+ fontdata.horizontalheader = {
+ version = readfixed(f),
+ ascender = readfword(f),
+ descender = readfword(f),
+ linegap = readfword(f),
+ maxadvancewidth = readufword(f),
+ minleftsidebearing = readfword(f),
+ minrightsidebearing = readfword(f),
+ maxextent = readfword(f),
+ caretsloperise = readshort(f),
+ caretsloperun = readshort(f),
+ caretoffset = readshort(f),
+ reserved_1 = readshort(f),
+ reserved_2 = readshort(f),
+ reserved_3 = readshort(f),
+ reserved_4 = readshort(f),
+ metricdataformat = readshort(f),
+ nofhmetrics = readushort(f),
+ }
+ else
+ fontdata.horizontalheader = {
+ nofhmetrics = 0,
+ }
+ end
+ end
+end
+
+-- We probably never need all these variables, but we do need the nofglyphs
+-- when loading other tables. Again we use the microsoft names but see no reason
+-- to have "max" in each name.
+
+-- fontdata.maximumprofile can be bad
+
+readers.maxp = function(f,fontdata,specification)
+ if specification.details then
+ local datatable = fontdata.tables.maxp
+ local version = 0 -- default so that the fallback below has a defined value
+ if datatable then
+ f:seek("set",datatable.offset)
+ version = readfixed(f)
+ if version == 0.5 then
+ fontdata.maximumprofile = {
+ version = version,
+ nofglyphs = readushort(f),
+ }
+ return
+ elseif version == 1.0 then
+ fontdata.maximumprofile = {
+ version = version,
+ nofglyphs = readushort(f),
+ points = readushort(f),
+ contours = readushort(f),
+ compositepoints = readushort(f),
+ compositecontours = readushort(f),
+ zones = readushort(f),
+ twilightpoints = readushort(f),
+ storage = readushort(f),
+ functiondefs = readushort(f),
+ instructiondefs = readushort(f),
+ stackelements = readushort(f),
+ sizeofinstructions = readushort(f),
+ componentelements = readushort(f),
+ componentdepth = readushort(f),
+ }
+ return
+ end
+ end
+ fontdata.maximumprofile = {
+ version = version,
+ nofglyphs = 0,
+ }
+ end
+end
+
+-- Here we filter the (advance) widths (that can be different from the boundingbox
+-- width of course).
+
+readers.hmtx = function(f,fontdata,specification)
+ if specification.glyphs then
+ local datatable = fontdata.tables.hmtx
+ if datatable then
+ f:seek("set",datatable.offset)
+ local nofmetrics = fontdata.horizontalheader.nofhmetrics
+ local glyphs = fontdata.glyphs
+ local nofglyphs = fontdata.nofglyphs
+ local nofrepeated = nofglyphs - nofmetrics
+ local width = 0 -- advance
+ local leftsidebearing = 0
+ for i=0,nofmetrics-1 do
+ local glyph = glyphs[i]
+ width = readshort(f)
+ leftsidebearing = readshort(f)
+ if width ~= 0 then
+ glyph.width = width
+ end
+ if leftsidebearing ~= 0 then
+ glyph.lsb = leftsidebearing
+ end
+ end
+ -- The next can happen in, for instance, a monospace font or a cjk font with
+ -- fixed widths: the remaining glyphs all get the last advance width.
+ for i=nofmetrics,nofglyphs-1 do
+ local glyph = glyphs[i]
+ if width ~= 0 then
+ glyph.width = width
+ end
+ if leftsidebearing ~= 0 then
+ glyph.lsb = leftsidebearing
+ end
+ end
+ end
+ end
+end
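+
+-- For the record, the rule the two loops above implement: the hmtx table holds
+-- nofhmetrics (advance,lsb) pairs followed by bare lsb values, and the last
+-- advance repeats for the remaining glyphs. A sketch, assuming an array
+-- "advances" indexed from 0 (not what this reader builds, it stores per glyph):
+--
+-- local function advancewidth(glyphindex)
+--     if glyphindex < nofmetrics then
+--         return advances[glyphindex]
+--     else
+--         return advances[nofmetrics-1]
+--     end
+-- end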
+
+-- The post table relates to postscript (printing) but has some relevant
+-- properties for other usage as well. We just use the names from the microsoft
+-- specification. The version 2.0 description is somewhat fuzzy but it is a
+-- hybrid with overloads.
+
+readers.post = function(f,fontdata,specification)
+ local datatable = fontdata.tables.post
+ if datatable then
+ f:seek("set",datatable.offset)
+ local version = readfixed(f)
+ fontdata.postscript = {
+ version = version,
+ italicangle = round(1000*readfixed(f))/1000,
+ underlineposition = readfword(f),
+ underlinethickness = readfword(f),
+ monospaced = readulong(f),
+ minmemtype42 = readulong(f),
+ maxmemtype42 = readulong(f),
+ minmemtype1 = readulong(f),
+ maxmemtype1 = readulong(f),
+ }
+ if not specification.glyphs then
+ -- enough done
+ elseif version == 1.0 then
+ -- mac encoding (258 glyphs)
+ local glyphs = fontdata.glyphs
+ for index=0,#standardromanencoding do
+ glyphs[index].name = standardromanencoding[index]
+ end
+ elseif version == 2.0 then
+ local glyphs = fontdata.glyphs
+ local nofglyphs = readushort(f)
+ local filesize = fontdata.filesize
+ local indices = { }
+ local names = { }
+ local maxnames = 0
+ for i=0,nofglyphs-1 do
+ local nameindex = readushort(f)
+ if nameindex >= 258 then
+ maxnames = maxnames + 1
+ nameindex = nameindex - 257
+ indices[nameindex] = i
+ else
+ glyphs[i].name = standardromanencoding[nameindex]
+ end
+ end
+ for i=1,maxnames do
+ local length = readbyte(f)
+ if length > 0 then
+ glyphs[indices[i]].name = readstring(f,length)
+ else
+ report("quit post name fetching at %a of %a",i,maxnames)
+ break
+ end
+ end
+ elseif version == 2.5 then
+ -- deprecated, will be done when needed
+ elseif version == 3.0 then
+ -- no ps name information
+ end
+ else
+ fontdata.postscript = { }
+ end
+end
+
+readers.cff = function(f,fontdata,specification)
+ if specification.glyphs then
+ reportskippedtable("cff")
+ end
+end
+
+-- Not all cmaps make sense, e.g. dfont is obsolete and probably more formats are
+-- not relevant. Let's see what we run into. There is some weird calculation going
+-- on here because we offset into a table that can be a blob of memory or a file.
+
+local formatreaders = { }
+
+formatreaders[4] = function(f,fontdata,offset)
+ f:seek("set",offset+2) -- skip format
+ --
+ local length = readushort(f) -- in bytes of subtable
+ local language = readushort(f)
+ local nofsegments = readushort(f) / 2
+ --
+ skipshort(f,3) -- searchrange entryselector rangeshift
+ --
+ local endchars = { }
+ local startchars = { }
+ local deltas = { }
+ local offsets = { }
+ local indices = { }
+ local mapmap = fontdata.map.map
+ local glyphs = fontdata.glyphs
+ --
+ for i=1,nofsegments do
+ endchars[i] = readushort(f)
+ end
+ local reserved = readushort(f)
+ for i=1,nofsegments do
+ startchars[i] = readushort(f)
+ end
+ for i=1,nofsegments do
+ deltas[i] = readshort(f)
+ end
+ for i=1,nofsegments do
+ offsets[i] = readushort(f)
+ end
+ -- format length language nofsegments searchrange entryselector rangeshift 4-tables
+ local size = (length - 2 * 2 - 5 * 2 - 4 * nofsegments * 2) / 2
+ for i=1,size-1 do
+ indices[i] = readushort(f)
+ end
+ --
+ for segment=1,nofsegments do
+ local startchar = startchars[segment]
+ local endchar = endchars[segment]
+ local offset = offsets[segment]
+ local delta = deltas[segment]
+ if startchar == 0xFFFF and endchar == 0xFFFF then
+ break
+ elseif offset == 0 then
+ for char=startchar,endchar do
+ local unicode = char
+ local index = mod(char + delta,65536)
+ if index and index > 0 then
+ local glyph = glyphs[index]
+ if not glyph.unicode then
+ glyph.unicode = unicode
+ end
+ mapmap[index] = unicode
+ -- report("%C %04i %05i %s",unicode,index,glyphs[index].name)
+ end
+ end
+ else
+ local shift = (segment-nofsegments+offset/2) - startchar
+ for char=startchar,endchar do
+ local unicode = mod(char + delta,65536)
+ local slot = shift + char
+ local index = indices[slot]
+ if index and index > 0 then
+ local glyph = glyphs[index]
+ if not glyph.unicode then
+ glyph.unicode = unicode
+ end
+ mapmap[index] = unicode
+ -- report("%C %04i %05i %s",unicode,index,glyphs[index].name)
+ end
+ end
+ end
+ end
+
+end
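+
+-- A minimal sketch of the per character lookup that the segment loops above
+-- unroll (the names are the local arrays from this reader; like the reader
+-- itself it leaves out the extra delta step for indexed glyphs that the
+-- specification mentions):
+--
+-- local function toindex(char,segment)
+--     local offset = offsets[segment]
+--     if offset == 0 then
+--         return mod(char + deltas[segment],65536)
+--     else
+--         local slot = offset/2 + (char - startchars[segment]) + segment - nofsegments
+--         return indices[slot]
+--     end
+-- end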
+
+formatreaders[6] = function(f,fontdata,offset)
+ f:seek("set",offset+2+2+2) -- skip format length language
+ local mapmap = fontdata.map.map
+ local glyphs = fontdata.glyphs
+ local start = readushort(f)
+ local count = readushort(f)
+ for unicode=start,start+count-1 do
+ local index = readushort(f)
+ if index > 0 then
+ local glyph = glyphs[index]
+ if not glyph.unicode then
+ glyph.unicode = unicode
+ end
+ mapmap[unicode] = index
+ end
+ end
+end
+
+formatreaders[12] = function(f,fontdata,offset)
+ f:seek("set",offset+2+2+4+4) -- skip format reserved length language
+ local mapmap = fontdata.map.map
+ local glyphs = fontdata.glyphs
+ local nofgroups = readulong(f)
+ for i=1,nofgroups do
+ local first = readulong(f)
+ local last = readulong(f)
+ local index = readulong(f)
+ for unicode=first,last do
+ local glyph = glyphs[index]
+ if not glyph.unicode then
+ glyph.unicode = unicode
+ end
+ mapmap[unicode] = index
+ index = index + 1
+ end
+ end
+end
+
+local function checkcmap(f,fontdata,records,platform,encoding,format)
+ local data = records[platform]
+ if not data then
+ return
+ end
+ data = data[encoding]
+ if not data then
+ return
+ end
+ data = data[format]
+ if not data then
+ return
+ end
+ local reader = formatreaders[format]
+ if not reader then
+ return
+ end
+ -- report("checking cmap: platform %a, encoding %a, format %a",platform,encoding,format)
+ reader(f,fontdata,data)
+ return true
+end
+
+function readers.cmap(f,fontdata,specification)
+ if specification.glyphs then
+ local datatable = fontdata.tables.cmap
+ if datatable then
+ local tableoffset = datatable.offset
+ f:seek("set",tableoffset)
+ local version = readushort(f)
+ local noftables = readushort(f)
+ local records = { }
+ local unicodecid = false
+ local variantcid = false
+ for i=1,noftables do
+ local platform = readushort(f)
+ local encoding = readushort(f)
+ local offset = readulong(f)
+ local record = records[platform]
+ if not record then
+ records[platform] = {
+ [encoding] = {
+ offsets = { offset },
+ formats = { },
+ }
+ }
+ else
+ local subtables = record[encoding]
+ if not subtables then
+ record[encoding] = {
+ offsets = { offset },
+ formats = { },
+ }
+ else
+ local offsets = subtables.offsets
+ offsets[#offsets+1] = offset
+ end
+ end
+ end
+ for platform, record in next, records do
+ for encoding, subtables in next, record do
+ local offsets = subtables.offsets
+ local formats = subtables.formats
+ for i=1,#offsets do
+ local offset = tableoffset + offsets[i]
+ f:seek("set",offset)
+ formats[readushort(f)] = offset
+ end
+ record[encoding] = formats
+ end
+ end
+ --
+ checkcmap(f,fontdata,records,3, 1, 4)
+ checkcmap(f,fontdata,records,3,10,12)
+ -- checkcmap(f,fontdata,records,0, 3, 4)
+ -- checkcmap(f,fontdata,records,1, 0, 6)
+ -- todo
+ variantcid = records[0] and records[0][5]
+ --
+ fontdata.cidmaps = {
+ version = version,
+ noftables = noftables,
+ records = records,
+ }
+ else
+ fontdata.cidmaps = { }
+ end
+ end
+end
+
+-- The glyf table depends on the loca table. We have one entry too many in the
+-- locations table (the last one is a dummy) because it lets us calculate the
+-- size of a glyph blob from the delta, although we do not need that in our usage
+-- (yet). We can remove the locations table when we're done (todo: cleanup
+-- finalizer).
+
+function readers.loca(f,fontdata,specification)
+ if specification.glyphs then
+ reportskippedtable("loca")
+ end
+end
+
+function readers.glyf(f,fontdata,specification) -- part goes to cff module
+ if specification.glyphs then
+ reportskippedtable("glyf")
+ end
+end
+
+-- Here we have a table that we really need for later processing, although a more
+-- advanced gpos table can also be available. Todo: we need a 'fake' lookup for
+-- this (analogous to ff).
+
+function readers.kern(f,fontdata,specification)
+ if specification.kerns then
+ local datatable = fontdata.tables.kern
+ if datatable then
+ f:seek("set",datatable.offset)
+ local version = readushort(f)
+ local noftables = readushort(f)
+ for i=1,noftables do
+ local version = readushort(f)
+ local length = readushort(f)
+ local coverage = readushort(f)
+ -- bit 8-15 of coverage: format 0 or 2
+ local format = bit32.rshift(coverage,8) -- is this ok?
+ if format == 0 then
+ local nofpairs = readushort(f)
+ local searchrange = readushort(f)
+ local entryselector = readushort(f)
+ local rangeshift = readushort(f)
+ local kerns = { }
+ local glyphs = fontdata.glyphs
+ for i=1,nofpairs do
+ local left = readushort(f)
+ local right = readushort(f)
+ local kern = readfword(f)
+ local glyph = glyphs[left]
+ local kerns = glyph.kerns
+ if kerns then
+ kerns[right] = kern
+ else
+ glyph.kerns = { [right] = kern }
+ end
+ end
+ -- fontdata.kerns = kerns
+ elseif format == 2 then
+ report("todo: kern classes")
+ else
+ report("todo: kerns")
+ end
+ end
+ end
+ end
+end
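+
+-- For the record: in the windows flavoured kern table the subtable format indeed
+-- sits in the high byte of the coverage word, the low byte holds the flags. A
+-- minimal sketch, assuming the bit32 library (present in luatex):
+--
+-- local band, rshift = bit32.band, bit32.rshift
+--
+-- local function splitcoverage(coverage)
+--     return band(rshift(coverage,8),0xFF), -- format (0 or 2)
+--            band(coverage,0xFF)            -- horizontal, minimum, crossstream, override
+-- end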
+
+function readers.gdef(f,fontdata,specification)
+ if specification.details then
+ reportskippedtable("gdef")
+ end
+end
+
+function readers.gsub(f,fontdata,specification)
+ if specification.details then
+ reportskippedtable("gsub")
+ end
+end
+
+function readers.gpos(f,fontdata,specification)
+ if specification.details then
+ reportskippedtable("gpos")
+ end
+end
+
+function readers.math(f,fontdata,specification)
+ if specification.glyphs then
+ local datatable = fontdata.tables.math
+ if datatable then
+ f:seek("set",datatable.offset)
+ local scriptlist = readulong(f)
+ local featurelist = readulong(f)
+ local lookuplist = readulong(f)
+ -- todo
+ end
+ end
+end
+
+-- Goodie. A sequence instead of segments costs a bit more memory, some 300K on a
+-- dejavu serif and about the same on a pagella regular.
+
+local function packoutlines(data,makesequence)
+ local subfonts = data.subfonts
+ if subfonts then
+ for i=1,#subfonts do
+ packoutlines(subfonts[i],makesequence)
+ end
+ return
+ end
+ local common = data.segments
+ if common then
+ return
+ end
+ local glyphs = data.glyphs
+ if not glyphs then
+ return
+ end
+ if makesequence then
+ for index=1,#glyphs do
+ local glyph = glyphs[index]
+ local segments = glyph.segments
+ if segments then
+ local sequence = { }
+ local nofsequence = 0
+ for i=1,#segments do
+ local segment = segments[i]
+ local nofsegment = #segment
+ nofsequence = nofsequence + 1
+ sequence[nofsequence] = segment[nofsegment]
+ for i=1,nofsegment-1 do
+ nofsequence = nofsequence + 1
+ sequence[nofsequence] = segment[i]
+ end
+ end
+ glyph.sequence = sequence
+ glyph.segments = nil
+ end
+ end
+ else
+ local hash = { }
+ local common = { }
+ local reverse = { }
+ local last = 0
+ for index=1,#glyphs do
+ local segments = glyphs[index].segments
+ if segments then
+ for i=1,#segments do
+ local h = concat(segments[i]," ")
+ hash[h] = (hash[h] or 0) + 1
+ end
+ end
+ end
+ for index=1,#glyphs do
+ local segments = glyphs[index].segments
+ if segments then
+ for i=1,#segments do
+ local segment = segments[i]
+ local h = concat(segment," ")
+ if hash[h] > 1 then
+ local idx = reverse[h]
+ if not idx then
+ last = last + 1
+ reverse[h] = last
+ common[last] = segment
+ idx = last
+ end
+ segments[i] = idx
+ end
+ end
+ end
+ end
+ if last > 0 then
+ data.segments = common
+ end
+ end
+end
+
+local function unpackoutlines(data)
+ local subfonts = data.subfonts
+ if subfonts then
+ for i=1,#subfonts do
+ unpackoutlines(subfonts[i])
+ end
+ return
+ end
+ local common = data.segments
+ if not common then
+ return
+ end
+ local glyphs = data.glyphs
+ if not glyphs then
+ return
+ end
+ for index=1,#glyphs do
+ local segments = glyphs[index].segments
+ if segments then
+ for i=1,#segments do
+ local c = common[segments[i]]
+ if c then
+ segments[i] = c
+ end
+ end
+ end
+ end
+ data.segments = nil
+end
+
+otf.packoutlines = packoutlines
+otf.unpackoutlines = unpackoutlines
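+
+-- A hypothetical round trip, as used in the shape cache further on: pack before
+-- writing to the cache, unpack after reading back (the filename is made up):
+--
+-- local data = readers.loadshapes("somefont.otf")
+-- packoutlines(data)   -- shared segments move to data.segments
+-- -- ... write data to the cache ...
+-- unpackoutlines(data) -- per glyph segments are restored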
+
+-- Now comes the loader. The order of reading these matters as we need to know
+-- some properties in order to read following tables. When details is true we also
+-- initialize the glyphs data.
+
+-- options:
+--
+-- properties : common metrics, names, list of features
+-- glyphs : metrics, encoding
+-- shapes : sequences or segments
+-- kerns : global (ttf) kerns
+-- lookups : gsub and gpos lookups
+
+local function readdata(f,offset,specification)
+ if offset then
+ f:seek("set",offset)
+ end
+ local tables = { }
+ local basename = file.basename(specification.filename)
+ local filesize = specification.filesize
+ local fontdata = { -- some can/will go
+ filename = basename,
+ filesize = filesize,
+ version = readstring(f,4),
+ noftables = readushort(f),
+ searchrange = readushort(f), -- not needed
+ entryselector = readushort(f), -- not needed
+ rangeshift = readushort(f), -- not needed
+ tables = tables,
+ }
+ for i=1,fontdata.noftables do
+ local tag = lower(stripstring(readstring(f,4)))
+ local checksum = readulong(f) -- not used
+ local offset = readulong(f)
+ local length = readulong(f)
+ if offset + length > filesize then
+ report("bad %a table in file %a",tag,basename)
+ end
+ tables[tag] = {
+ checksum = checksum,
+ offset = offset,
+ length = length,
+ }
+ end
+ if specification.glyphs then
+ local glyphs = setmetatableindex(function(t,k)
+ local v = {
+ -- maybe more defaults
+ index = k,
+ }
+ t[k] = v
+ return v
+ end)
+ local map = {
+ map = { },
+ backmap = { },
+ }
+ fontdata.glyphs = glyphs
+ fontdata.map = map
+ end
+ readers["name"](f,fontdata,specification)
+ readers["os/2"](f,fontdata,specification)
+ readers["head"](f,fontdata,specification)
+ readers["maxp"](f,fontdata,specification)
+ readers["hhea"](f,fontdata,specification)
+ readers["hmtx"](f,fontdata,specification)
+ readers["post"](f,fontdata,specification)
+ readers["cff" ](f,fontdata,specification)
+ readers["cmap"](f,fontdata,specification)
+ readers["loca"](f,fontdata,specification)
+ readers["glyf"](f,fontdata,specification)
+ readers["kern"](f,fontdata,specification)
+ readers["gdef"](f,fontdata,specification)
+ readers["gsub"](f,fontdata,specification)
+ readers["gpos"](f,fontdata,specification)
+ readers["math"](f,fontdata,specification)
+ --
+ fontdata.locations = nil
+ fontdata.tables = nil
+ fontdata.cidmaps = nil
+ fontdata.dictionaries = nil
+ --
+ -- fontdata.cff = nil
+ --
+ return fontdata
+end
+
+local function loadfontdata(specification)
+ local filename = specification.filename
+ local filesize = file.size(filename)
+ local f = io.open(filename,"rb")
+ if f then
+ if filesize > 0 then
+ specification.filesize = filesize
+ local version = readstring(f,4)
+ local fontdata = nil
+ if version == "OTTO" or version == "true" or version == "\0\1\0\0" then
+ fontdata = readdata(f,0,specification)
+ elseif version == "ttcf" then
+ local subfont = tonumber(specification.subfont)
+ local offsets = { }
+ local ttcversion = readulong(f)
+ local nofsubfonts = readulong(f)
+ for i=1,nofsubfonts do
+ offsets[i] = readulong(f)
+ end
+ if subfont then
+ if subfont >= 1 and subfont <= nofsubfonts then
+ fontdata = readdata(f,offsets[subfont],specification)
+ else
+ report("no subfont %a in file %a",subfont,filename)
+ end
+ else
+ local subfonts = { }
+ fontdata = {
+ filename = filename,
+ filesize = filesize,
+ version = version,
+ subfonts = subfonts,
+ ttcversion = ttcversion,
+ nofsubfonts = nofsubfonts,
+ }
+ for i=1,fontdata.nofsubfonts do
+ subfonts[i] = readdata(f,offsets[i],specification)
+ end
+ end
+ else
+ report("unknown version %a in file %a",version,filename)
+ end
+ f:close()
+ return fontdata
+ else
+ report("empty file %a",filename)
+ f:close()
+ end
+ else
+ report("unable to open %a",filename)
+ end
+end
+
+local function loadfont(specification)
+ if type(specification) == "string" then
+ specification = {
+ filename = specification,
+ info = true, -- always true (for now)
+ details = true,
+ glyphs = true,
+ shapes = true,
+ kerns = true,
+ lookups = true,
+ -- true or number:
+ subfont = true,
+ }
+ end
+ -- if shapes only then
+ if specification.shapes or specification.lookups or specification.kerns then
+ specification.glyphs = true
+ end
+ if specification.glyphs then
+ specification.details = true
+ end
+ if specification.details then
+ specification.info = true
+ end
+ local function message(str)
+ report("fatal error in file %a: %s",specification.filename,str)
+ end
+ local ok, result = xpcall(loadfontdata,message,specification)
+ if ok then
+ return result
+ end
+end
+
+readers.loadfont = loadfont
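+
+-- A hedged usage sketch (the filename is made up): pass a filename, which
+-- implies all options, or a specification table with the options listed above:
+--
+-- local fontdata = readers.loadfont {
+--     filename = "texgyrepagella-regular.otf",
+--     details  = true, -- no glyphs, shapes, kerns or lookups
+-- }
+-- if fontdata then
+--     print(fontdata.fontheader.units)
+-- end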
+
+----- validutf = lpeg.patterns.utf8character^0 * P(-1)
+local validutf = lpeg.patterns.validutf8
+
+local function getinfo(maindata,sub)
+ local fontdata = sub and maindata.subfonts[sub] or maindata
+ local names = fontdata.names
+ if names then
+ local metrics = fontdata.windowsmetrics or { }
+ local postscript = fontdata.postscript or { }
+ local fontheader = fontdata.fontheader or { }
+ local cffinfo = fontdata.cffinfo or { }
+ local filename = fontdata.filename
+ --
+ local function name(key)
+ local value = names[key]
+ if value then
+ local content = value.content
+ return lpegmatch(validutf,content) and content or nil
+ end
+ end
+ --
+ local weight = name("weight") or cffinfo.weight or metrics.weight
+ local width = name("width") or cffinfo.width or metrics.width
+ local info = { -- we inherit some inconsistencies/choices from ff
+ subfontindex = sub or 0,
+ -- filename = filename,
+ -- version = name("version"),
+ fontname = name("postscriptname"),
+ fullname = name("fullname"), -- or file.nameonly(filename)
+ familyname = name("typographicfamily") or name("family"),
+ subfamily = name("subfamily"),
+ modifiers = name("typographicsubfamily"),
+ weight = weight and lower(weight),
+ width = width and lower(width),
+ pfmweight = metrics.weightclass or 400, -- will become weightclass
+ pfmwidth = metrics.widthclass or 5, -- will become widthclass
+ panosewidth = metrics.panosewidth,
+ panoseweight = metrics.panoseweight,
+ italicangle = postscript.italicangle or 0,
+ units = fontheader.units or 0,
+ designsize = fontdata.designsize,
+ minsize = fontdata.minsize,
+ maxsize = fontdata.maxsize,
+ monospaced = (tonumber(postscript.monospaced or 0) > 0) or metrics.panosewidth == "monospaced",
+ }
+ return info
+ elseif sub then
+ return {
+ filename = fontdata.filename,
+ comment = "there is no info for subfont " .. sub,
+ }
+ else
+ return {
+ filename = fontdata.filename,
+ comment = "there is no info",
+ }
+ end
+end
+
+-- we need even less, but we can have a 'detail' variant
+
+function readers.loadshapes(filename,n)
+ local fontdata = loadfont {
+ filename = filename,
+ shapes = true,
+ subfont = n,
+ }
+ return fontdata and {
+ -- version = 0.123 -- todo
+ filename = filename,
+ glyphs = fontdata.glyphs,
+ units = fontdata.fontheader.units,
+ } or {
+ filename = filename,
+ glyphs = { },
+ units = 0,
+ }
+end
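+
+-- A small sketch of what a consumer of loadshapes gets back (the filename is
+-- made up; glyphs can be indexed sparsely and from 0, so we use next):
+--
+-- local shapes = readers.loadshapes("texgyrepagella-regular.otf")
+-- for index, glyph in next, shapes.glyphs do
+--     if glyph.segments then
+--         print(index,#glyph.segments)
+--     end
+-- end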
+
+function readers.getinfo(filename,n,details)
+ local fontdata = loadfont {
+ filename = filename,
+ details = true,
+ }
+-- if string.find(filename,"ource") then
+-- inspect(fontdata)
+-- end
+ if fontdata then
+ local subfonts = fontdata.subfonts
+ if not subfonts then
+ return getinfo(fontdata)
+ elseif type(n) ~= "number" then
+ local info = { }
+ for i=1,#subfonts do
+ info[i] = getinfo(fontdata,i)
+ end
+ return info
+ elseif n >= 1 and n <= #subfonts then
+ return getinfo(fontdata,n)
+ else
+ return {
+ filename = filename,
+ comment = "there is no subfont " .. n .. " in this file"
+ }
+ end
+ else
+ return {
+ filename = filename,
+ comment = "the file cannot be opened for reading",
+ }
+ end
+end
+
+--
+
+if fonts.hashes then
+
+ local identifiers = fonts.hashes.identifiers
+ local loadshapes = readers.loadshapes
+
+ readers.version = 0.006
+ readers.cache = containers.define("fonts", "shapes", readers.version, true)
+
+ -- todo: loaders per format
+
+ local function load(filename,sub)
+ local base = file.basename(filename)
+ local name = file.removesuffix(base)
+ local kind = file.suffix(filename)
+ local attr = lfs.attributes(filename)
+ local size = attr and attr.size or 0
+ local time = attr and attr.modification or 0
+ local sub = tonumber(sub)
+ local data = nil
+ if size > 0 and (kind == "otf" or kind == "ttf" or kind == "ttc") then
+ local hash = containers.cleanname(base) -- including suffix
+ if sub then
+ hash = hash .. "-" .. sub
+ end
+ data = containers.read(readers.cache,hash)
+ if not data or data.time ~= time or data.size ~= size then
+ data = loadshapes(filename,sub)
+ if data then
+ data.size = size
+ data.format = "opentype"
+ data.time = time
+ packoutlines(data)
+ containers.write(readers.cache,hash,data)
+ data = containers.read(readers.cache,hash) -- frees old mem
+ end
+ end
+ unpackoutlines(data)
+ else
+ data = {
+ filename = filename,
+ size = 0,
+ time = time,
+ format = "unknown",
+ units = 1000,
+ glyphs = { }
+ }
+ end
+ return data
+ end
+
+ fonts.hashes.shapes = table.setmetatableindex(function(t,k)
+ local d = identifiers[k]
+ local v = load(d.properties.filename,d.subindex)
+ t[k] = v
+ return v
+ end)
+
+end
diff --git a/tex/context/base/font-pat.lua b/tex/context/base/font-pat.lua
index 049853796..357b12ea0 100644
--- a/tex/context/base/font-pat.lua
+++ b/tex/context/base/font-pat.lua
@@ -11,29 +11,12 @@ if not modules then modules = { } end modules ['font-pat'] = {
local match, lower = string.match, string.lower
--- Older versions of latin modern didn't have the designsize set so for them we
--- get it from the name reporter moved to elsewhere.
-
local fonts = fonts
local otf = fonts.handlers.otf
local patches = otf.enhancers.patches
local register = patches.register
local report = patches.report
--- local function patch(data,filename)
--- if not metadata.design_size or metadata.design_size == 0 then
--- local ds = match(file.basename(lower(filename)),"(%d+)")
--- if ds then
--- report("font %a has design size %a",filename,ds)
--- metadata.design_size = tonumber(ds) * 10
--- end
--- end
--- end
---
--- register("after","migrate metadata","^lmroman", patch)
--- register("after","migrate metadata","^lmsans", patch)
--- register("after","migrate metadata","^lmtypewriter",patch)
-
-- For some reason (either it's a bug in the font, or it's a problem in the
-- library) the palatino arabic fonts don't have the mkmk features properly
-- set up.
diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua
index da9c19967..fa152466d 100644
--- a/tex/context/base/font-syn.lua
+++ b/tex/context/base/font-syn.lua
@@ -15,14 +15,15 @@ if not modules then modules = { } end modules ['font-syn'] = {
-- new lua loader: 5 sec
local next, tonumber, type, tostring = next, tonumber, type, tostring
-local sub, gsub, lower, match, find, lower, upper = string.sub, string.gsub, string.lower, string.match, string.find, string.lower, string.upper
-local find, gmatch = string.find, string.gmatch
-local concat, sort, format = table.concat, table.sort, string.format
+local sub, gsub, match, find, lower, upper = string.sub, string.gsub, string.match, string.find, string.lower, string.upper
+local concat, sort = table.concat, table.sort
local serialize, sortedhash = table.serialize, table.sortedhash
local lpegmatch = lpeg.match
local unpack = unpack or table.unpack
local formatters, topattern = string.formatters, string.topattern
local round = math.round
+local P, R, S, C, Cc, Ct, Cs = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Ct, lpeg.Cs
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local allocate = utilities.storage.allocate
local sparse = utilities.storage.sparse
@@ -42,12 +43,8 @@ local findfile = resolvers.findfile
local cleanpath = resolvers.cleanpath
local resolveprefix = resolvers.resolve
-local fontloader = fontloader
-local font_to_table = fontloader.to_table
-local open_font = fontloader.open
-local get_font_info = fontloader.info
-local close_font = fontloader.close
-local font_fields = fontloader.fields
+----- fontloader = fontloader -- still needed for pfb (now)
+----- get_font_info = fontloader.info
local settings_to_hash = utilities.parsers.settings_to_hash_tolerant
@@ -76,7 +73,7 @@ fonts.treatments = treatments
names.data = names.data or allocate { }
-names.version = 1.123
+names.version = 1.125
names.basename = "names"
names.saved = false
names.loaded = false
@@ -94,8 +91,6 @@ directives.register("fonts.usesystemfonts", function(v) usesystemfonts = toboole
<p>A few helpers.</p>
--ldx]]--
-local P, C, Cc, Cs = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cs
-
-- -- what to do with these -- --
--
-- thin -> thin
@@ -313,122 +308,54 @@ end
but to keep the overview, we define them here.</p>
--ldx]]--
-filters.otf = get_font_info
-filters.ttf = get_font_info
-filters.ttc = get_font_info
-filters.dfont = get_font_info
+-- filters.dfont = get_font_info
--- We had this as temporary solution because we needed a bit more info but in the
--- meantime it got an interesting side effect: currently luatex delays loading of e.g.
--- glyphs so here we first load and then discard which is a waste. In the past it did
--- free memory because a full load was done. One of these things that goes unnoticed.
---
--- missing: names, units_per_em, design_range_bottom, design_range_top, design_size,
--- pfminfo, top_side_bearing
-
--- local function get_full_info(...) -- check with taco what we get / could get
--- local ff = open_font(...)
--- if ff then
--- local d = ff -- and font_to_table(ff)
--- d.glyphs, d.subfonts, d.gpos, d.gsub, d.lookups = nil, nil, nil, nil, nil
--- close_font(ff)
--- return d
--- else
--- return nil, "error in loading font"
--- end
--- end
+filters.otf = fonts.handlers.otf.readers.getinfo
+filters.ttf = filters.otf
+filters.ttc = filters.otf
--- Phillip suggested this faster variant but it's still a hack as fontloader.info should
--- return these keys/values (and maybe some more) but at least we close the loader which
--- might save some memory in the end.
-
--- local function get_full_info(name)
--- local ff = open_font(name)
--- if ff then
--- local fields = table.tohash(font_fields(ff),true) -- isn't that one stable
--- local d = {
--- names = fields.names and ff.names,
--- familyname = fields.familyname and ff.familyname,
--- fullname = fields.fullname and ff.fullname,
--- fontname = fields.fontname and ff.fontname,
--- weight = fields.weight and ff.weight,
--- italicangle = fields.italicangle and ff.italicangle,
--- units_per_em = fields.units_per_em and ff.units_per_em,
--- design_range_bottom = fields.design_range_bottom and ff.design_range_bottom,
--- design_range_top = fields.design_range_top and ff.design_range_top,
--- design_size = fields.design_size and ff.design_size,
--- italicangle = fields.italicangle and ff.italicangle,
--- pfminfo = fields.pfminfo and ff.pfminfo,
--- top_side_bearing = fields.top_side_bearing and ff.top_side_bearing,
--- }
--- -- setmetatableindex(d,function(t,k)
--- -- report_names("warning, trying to access field %a in font table of %a",k,name)
--- -- end)
--- close_font(ff)
--- return d
--- else
--- return nil, "error in loading font"
--- end
--- end
-
--- more efficient:
-
-local fields = nil
-
-local function get_full_info(name)
- local ff = open_font(name)
- if ff then
- if not fields then
- fields = table.tohash(font_fields(ff),true)
- end
- -- unfortunately luatex aborts when a field is not available
- local d = {
- names = fields.names and ff.names,
- familyname = fields.familyname and ff.familyname,
- fullname = fields.fullname and ff.fullname,
- fontname = fields.fontname and ff.fontname,
- weight = fields.weight and ff.weight,
- italicangle = fields.italicangle and ff.italicangle,
- units_per_em = fields.units_per_em and ff.units_per_em,
- design_range_bottom = fields.design_range_bottom and ff.design_range_bottom,
- design_range_top = fields.design_range_top and ff.design_range_top,
- design_size = fields.design_size and ff.design_size,
- italicangle = fields.italicangle and ff.italicangle,
- pfminfo = fields.pfminfo and ff.pfminfo,
- top_side_bearing = fields.top_side_bearing and ff.top_side_bearing, -- not there
- }
- if d.italicangle then
- d.italicangle = round(1000*d.italicangle)/1000
- end
- -- setmetatableindex(d,function(t,k)
- -- report_names("warning, trying to access field %a in font table of %a",k,name)
- -- end)
- close_font(ff)
- return d
+local function normalize(t)
+ local boundingbox = t.fontbbox
+ if boundingbox then
+ for i=1,#boundingbox do
+ boundingbox[i] = tonumber(boundingbox[i])
+ end
else
- return nil, "error in loading font"
- end
+ boundingbox = { 0, 0, 0, 0 }
+ end
+ return {
+ copyright = t.copyright,
+ fontname = t.fontname,
+ fullname = t.fullname,
+ familyname = t.familyname,
+ weight = t.weight,
+ width = t.width,
+ italicangle = tonumber(t.italicangle) or 0,
+ monospaced = toboolean(t.isfixedpitch) or false,
+ boundingbox = boundingbox,
+ version = t.version,
+ capheight = tonumber(t.capheight),
+ xheight = tonumber(t.xheight),
+ ascender = tonumber(t.ascender),
+ descender = tonumber(t.descender),
+ }
end
--- As we have lazy loading anyway, this one still is full and with less code than
--- the previous one. But this depends on the garbage collector to kick in and in the
--- current version that somehow happens not that often (on my machine I end up with
--- soem 3 GB extra before that happens).
+local p_spaces = lpegpatterns.whitespace
+local p_number = (R("09")+S(".-+"))^1 / tonumber
+local p_boolean = P("false") * Cc(false)
+ + P("false") * Cc(false)
+local p_string = P("(") * C((lpegpatterns.nestedparents + 1 - P(")"))^1) * P(")")
+local p_array = P("[") * Ct((p_number + p_boolean + p_string + p_spaces^1)^1) * P("]")
+ + P("{") * Ct((p_number + p_boolean + p_string + p_spaces^1)^1) * P("}")
--- local function get_full_info(...)
--- local ff = open_font(...)
--- if ff then
--- local d = { } -- ff is userdata so [1] or # fails on it
--- setmetatableindex(d,ff)
--- return d -- garbage collection will do the close_font(ff)
--- else
--- return nil, "error in loading font"
--- end
--- end
+local p_key = P("/") * C(R("AZ","az")^1)
+local p_value = p_string
+ + p_number
+ + p_boolean
+ + p_array
-fontloader.fullinfo = get_full_info
-filters .otf = get_full_info
-filters .ttf = get_full_info
+local p_entry = p_key * p_spaces^0 * p_value
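+
+-- A hedged example of what these patterns are meant to match, the kind of lines
+-- found in a type1/pfb header between "dict begin" and "currentdict end" (the
+-- values are made up):
+--
+-- lpegmatch(p_entry,"/ItalicAngle -9.5 def")             -- "ItalicAngle", -9.5
+-- lpegmatch(p_entry,"/isFixedPitch false def")           -- "isFixedPitch", false
+-- lpegmatch(p_entry,"/FontBBox {-50 -200 1000 900} def") -- "FontBBox", { -50, -200, 1000, 900 }
+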
function filters.afm(name)
-- we could parse the afm file as well, and then report an error but
@@ -441,24 +368,46 @@ function filters.afm(name)
local f = io.open(name)
if f then
local hash = { }
- for line in f:lines() do -- slow
- local key, value = match(line,"^(.+)%s+(.+)%s*$")
- if key and #key > 0 then
- hash[lower(key)] = value
- end
+ local okay = false
+ for line in f:lines() do -- slow but only a few lines at the beginning
if find(line,"StartCharMetrics",1,true) then
break
+ else
+ local key, value = match(line,"^(.+)%s+(.+)%s*$")
+ if key and #key > 0 then
+ hash[lower(key)] = value
+ end
end
end
f:close()
- return hash
+ return normalize(hash)
end
end
return nil, "no matching pfb file"
end
function filters.pfb(name)
- return get_font_info(name)
+ local f = io.open(name)
+ if f then
+ local hash = { }
+ local okay = false
+ for line in f:lines() do -- slow but only a few lines at the beginning
+ if find(line,"dict begin") then
+ okay = true
+ elseif not okay then
+ -- go on
+ elseif find(line,"currentdict end") then
+ break
+ else
+ local key, value = lpegmatch(p_entry,line)
+ if key and value then
+ hash[lower(key)] = value
+ end
+ end
+ end
+ f:close()
+ return normalize(hash)
+ end
end
--[[ldx--
@@ -468,8 +417,7 @@ for combination with the weight of a font.</p>
--ldx]]--
filters.list = {
- "otf", "ttf", "ttc", "dfont", "afm",
- -- "ttc", "otf", "ttf", "dfont", "afm",
+ "otf", "ttf", "ttc", "afm", -- no longer dfont support (for now)
}
-- to be considered: loop over paths per list entry (so first all otf ttf etc)
@@ -574,18 +522,18 @@ end
names.cleanname = cleanname
names.cleanfilename = cleanfilename
-local function check_names(result)
- local names = result.names
- if names then
- for i=1,#names do
- local name = names[i]
- if name.lang == "English (US)" then
- return name.names
- end
- end
- end
- return result
-end
+-- local function check_names(result)
+-- local names = result.names
+-- if names then
+-- for i=1,#names do
+-- local name = names[i]
+-- if name.lang == "English (US)" then
+-- return name.names
+-- end
+-- end
+-- end
+-- return result
+-- end
local function walk_tree(pathlist,suffix,identify)
if pathlist then
@@ -611,27 +559,16 @@ end
local function check_name(data,result,filename,modification,suffix,subfont)
-- shortcuts
local specifications = data.specifications
- -- prepare
- local names = check_names(result)
-- fetch
--- if string.find(string.lower(filename),"ebgaramond") then
--- inspect(result)
--- inspect(names)
--- end
-
-if string.find(filename,"avkv") then
- inspect(result)
-end
-
- local familyname = names and names.preffamilyname or result.familyname
- local fullname = names and names.fullname or result.fullname
+ local familyname = result.familyname
+ local fullname = result.fullname
local fontname = result.fontname
- local subfamily = names and names.subfamily or result.subfamily
- local modifiers = names and names.prefmodifiers or result.modifiers
- local weight = names and names.weight or result.weight
+ local subfamily = result.subfamily
+ local modifiers = result.modifiers
+ local weight = result.weight
local italicangle = tonumber(result.italicangle)
- local subfont = subfont or nil
- local rawname = fullname or fontname or familyname
+ local subfont = subfont
+ local rawname = fullname or fontname or familyname
local filebase = removesuffix(basename(filename))
local cleanfilename = cleanname(filebase) -- for WS
-- normalize
@@ -667,18 +604,18 @@ end
fullname = fullname or fontname
familyname = familyname or fontname
-- we do these sparse -- todo: check table type or change names in ff loader
- local units = result.units_per_em or result.emunits or 1000 -- can be zero too
- local minsize = result.design_range_bottom or result.mindesignsize or 0
- local maxsize = result.design_range_top or result.maxdesignsize or 0
- local designsize = result.design_size or result.designsize or 0
- local angle = result.italicangle or 0
- local pfminfo = result.pfminfo
- local pfmwidth = (pfminfo and pfminfo.width ) or result.pfmwidth or 0
- local pfmweight = (pfminfo and pfminfo.weight) or result.pfmweight or 0
+ local units = result.units or 1000 -- can be zero too
+ local designsize = result.designsize or 0
+ local minsize = result.mindesign or 0
+ local maxsize = result.maxdesign or 0
+ local angle = result.italicangle or 0
+ local pfmwidth = result.pfmwidth or 0
+ local pfmweight = result.pfmweight or 0
--
specifications[#specifications + 1] = {
filename = filename, -- unresolved
cleanfilename = cleanfilename,
+ -- subfontindex = subfont,
format = lower(suffix),
subfont = subfont,
rawname = rawname,
@@ -700,9 +637,6 @@ end
designsize = designsize ~= 0 and designsize or nil,
modification = modification ~= 0 and modification or nil,
}
--- inspect(filename)
--- inspect(result)
--- inspect(specifications[#specifications])
end
local function cleanupkeywords()
diff --git a/tex/context/base/font-tmp.lua b/tex/context/base/font-tmp.lua
new file mode 100644
index 000000000..5b4a08740
--- /dev/null
+++ b/tex/context/base/font-tmp.lua
@@ -0,0 +1,120 @@
+if not modules then modules = { } end modules ['font-tmp'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- There is a complete feature loader but it needs a bit of testing first, so this
+-- one does design size only (as needed for identifying).
+
+local next, type = next, type
+
+local report = logs.reporter("otf reader")
+
+local files = utilities.files
+
+local readushort = files.readcardinal2 -- 16-bit unsigned integer
+local readulong = files.readcardinal4 -- 32-bit unsigned integer
+local readshort = files.readinteger2 -- 16-bit signed integer
+
+local readtag = function(f) return f:read(4) end
+local skipshort = function(f,n) f:read(n and 2*n or 2) end
+
+local readers = fonts.handlers.otf.readers
+
+local plugins = { }
+
+function plugins.size(f,fontdata,tableoffset,parameters)
+ if not fontdata.designsize then
+ f:seek("set",tableoffset+parameters)
+ local designsize = readushort(f)
+ if designsize > 0 then
+ fontdata.designsize = designsize
+ skipshort(f,2)
+ fontdata.minsize = readushort(f)
+ fontdata.maxsize = readushort(f)
+ end
+ end
+end
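+
+-- Other feature specific handlers can be hooked into the same plugins table; a
+-- hypothetical example (the tag and the body are made up, only the calling
+-- convention matters):
+--
+-- function plugins.ss01(f,fontdata,featureoffset,parameters)
+--     -- featureoffset points to the feature table and parameters is the offset
+--     -- to its parameter block; seek into f as needed
+-- end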
+
+local function readscripts(f,fontdata,what)
+ local datatable = fontdata.tables[what]
+ if not datatable then
+ return
+ end
+ local tableoffset = datatable.offset
+ f:seek("set",tableoffset)
+ local version = readulong(f)
+ if version ~= 0x00010000 then
+ report("table version %a of %a is not supported (yet), maybe font %s is bad",version,what,fontdata.filename)
+ return
+ end
+ --
+ local scriptoffset = tableoffset + readushort(f)
+ local featureoffset = tableoffset + readushort(f)
+ local lookupoffset = tableoffset + readushort(f)
+ --
+ f:seek("set",scriptoffset)
+ local nofscripts = readushort(f)
+ local scripts = { }
+ for i=1,nofscripts do
+ scripts[readtag(f)] = scriptoffset + readushort(f)
+ end
+ local languagesystems = table.setmetatableindex("table") -- we share when possible
+ for script, offset in next, scripts do
+ f:seek("set",offset)
+ local defaultoffset = readushort(f)
+ local noflanguages = readushort(f)
+ local languages = { }
+ if defaultoffset > 0 then
+ languages.dflt = languagesystems[offset + defaultoffset]
+ end
+ for i=1,noflanguages do
+ local language = readtag(f)
+ local offset = offset + readushort(f)
+ languages[language] = languagesystems[offset]
+ end
+ scripts[script] = languages
+ end
+ --
+ f:seek("set",featureoffset)
+ local features = { }
+ local noffeatures = readushort(f)
+ for i=1,noffeatures do
+ features[i] = {
+ tag = readtag(f),
+ offset = readushort(f)
+ }
+ end
+ --
+ for i=1,noffeatures do
+ local feature = features[i]
+ local offset = featureoffset + feature.offset
+ f:seek("set",offset)
+ local parameters = readushort(f) -- feature.parameters
+ local noflookups = readushort(f)
+ skipshort(f,noflookups+1)
+ if parameters > 0 then
+ feature.parameters = parameters
+ local plugin = plugins[feature.tag]
+ if plugin then
+ plugin(f,fontdata,offset,parameters)
+ end
+ end
+ end
+end
+
+function readers.gsub(f,fontdata,specification)
+ if specification.details then
+ readscripts(f,fontdata,"gsub")
+ end
+end
+
+function readers.gpos(f,fontdata,specification)
+ if specification.details then
+ readscripts(f,fontdata,"gpos")
+ end
+end
+
diff --git a/tex/context/base/font-ttf.lua b/tex/context/base/font-ttf.lua
new file mode 100644
index 000000000..f89c0b14e
--- /dev/null
+++ b/tex/context/base/font-ttf.lua
@@ -0,0 +1,475 @@
+if not modules then modules = { } end modules ['font-ttf'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local next, type, unpack = next, type, unpack
+local bittest = bit32.btest
+local sqrt = math.sqrt
+
+local report = logs.reporter("otf reader","ttf")
+
+local files = utilities.files
+
+local readbyte = files.readcardinal1 -- 8-bit unsigned integer
+local readushort = files.readcardinal2 -- 16-bit unsigned integer
+local readulong = files.readcardinal4 -- 32-bit unsigned integer
+local readchar = files.readinteger1 -- 8-bit signed integer
+local readshort = files.readinteger2 -- 16-bit signed integer
+local read2dot14 = files.read2dot14 -- 16-bit signed fixed number with the low 14 bits of fraction (2.14) (F2DOT14)
+
+local function mergecomposites(glyphs,shapes)
+
+ local function merge(index,shape,components)
+ local contours = { }
+ local nofcontours = 0
+ for i=1,#components do
+ local component = components[i]
+ local subindex = component.index
+ local subshape = shapes[subindex]
+ local subcontours = subshape.contours
+ if not subcontours then
+ local subcomponents = subshape.components
+ if subcomponents then
+ subcontours = merge(subindex,subshape,subcomponents)
+ end
+ end
+ if subcontours then
+ local matrix = component.matrix
+ local xscale = matrix[1]
+ local xrotate = matrix[2]
+ local yrotate = matrix[3]
+ local yscale = matrix[4]
+ local xoffset = matrix[5]
+ local yoffset = matrix[6]
+ for i=1,#subcontours do
+ local points = subcontours[i]
+ local result = { }
+ for i=1,#points do
+ local p = points[i]
+ local x = p[1]
+ local y = p[2]
+ result[i] = {
+ xscale * x + xrotate * y + xoffset,
+ yscale * y + yrotate * x + yoffset,
+ p[3]
+ }
+ end
+ nofcontours = nofcontours + 1
+ contours[nofcontours] = result
+ end
+ else
+ report("missing contours composite %s, component %s of %s, glyph %s",index,i,#components,subindex)
+ end
+ end
+ shape.contours = contours
+ shape.components = nil
+ return contours
+ end
+
+ for index=1,#glyphs do
+ local shape = shapes[index]
+ local components = shape.components
+ if components then
+ merge(index,shape,components)
+ end
+ end
+
+end
+
+local function readnothing(f,nofcontours)
+ return {
+ type = "nothing",
+ }
+end
+
+-- begin of converter
+
+-- make paths: the ff code is quite complex but it looks like we need to deal
+-- with all kinds of on-curve border cases
+
+local function curveto(m_x,m_y,l_x,l_y,r_x,r_y) -- todo: inline this
+ return {
+ l_x + 2/3 *(m_x-l_x), l_y + 2/3 *(m_y-l_y),
+ r_x + 2/3 *(m_x-r_x), r_y + 2/3 *(m_y-r_y),
+ r_x, r_y, "c" -- "curveto"
+ }
+end
+
+-- We could omit the operator which saves some 10%:
+--
+-- #2=lineto #4=quadratic #6=cubic #3=moveto (with "m")
+--
+-- For the moment we keep the original outlines but that default might change
+-- in the future. In any case, a backend should support both.
+--
+-- The code is a bit messy. I looked at the ff code but it's messy too. It has
+-- to do with the fact that we need to look at points on the curve and control
+-- points in between. This also means that we start at point 2 and have to look at
+-- point 1 when we're at the end. We still use a ps like storage with the operator
+-- last in an entry. It's typical code that evolves stepwise till a point of no
+-- comprehension.
+
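+-- To make that storage concrete: a simple glyph could end up as something like
+-- this (coordinates made up), with the operator last in each entry:
+--
+-- glyph.segments = {
+--     {  50,   0, "m" },           -- moveto
+--     { 450,   0, "l" },           -- lineto
+--     { 450, 300, 250, 600, "q" }, -- quadraticto: control point, then end point
+--     {  50,   0, "l" },           -- close back to the start
+-- }
+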
+local function contours2outlines(glyphs,shapes)
+ local quadratic = true
+ -- local quadratic = false
+ for index=1,#glyphs do
+ local glyph = glyphs[index]
+ local shape = shapes[index]
+ local contours = shape.contours
+ if contours then
+ local nofcontours = #contours
+ local segments = { }
+ local nofsegments = 0
+ glyph.segments = segments
+ if nofcontours > 0 then
+ for i=1,nofcontours do
+ local contour = contours[i]
+ local nofcontour = #contour
+ if nofcontour > 0 then
+ local first_pt = contour[1]
+ local first_on = first_pt[3]
+ -- todo no new tables but reuse lineto and quadratic
+ if nofcontour == 1 then
+ -- this can influence the boundingbox
+ first_pt[3] = "m" -- "moveto"
+ nofsegments = nofsegments + 1
+ segments[nofsegments] = first_pt
+ else -- maybe also treat n == 2 special
+ local first_on = first_pt[3]
+ local last_pt = contour[nofcontour]
+ local last_on = last_pt[3]
+ local start = 1
+ local control_pt = false
+ if first_on then
+ start = 2
+ else
+ if last_on then
+ first_pt = last_pt
+ else
+ first_pt = { (first_pt[1]+last_pt[1])/2, (first_pt[2]+last_pt[2])/2, false }
+ end
+ control_pt = first_pt
+ end
+ nofsegments = nofsegments + 1
+ segments[nofsegments] = { first_pt[1], first_pt[2], "m" } -- "moveto"
+ local previous_pt = first_pt
+ for i=start,nofcontour do
+ local current_pt = contour[i]
+ local current_on = current_pt[3]
+ local previous_on = previous_pt[3]
+ if previous_on then
+ if current_on then
+ -- both normal points
+ nofsegments = nofsegments + 1
+ segments[nofsegments] = { current_pt[1], current_pt[2], "l" } -- "lineto"
+ else
+ control_pt = current_pt
+ end
+ elseif current_on then
+ local ps = segments[nofsegments]
+ nofsegments = nofsegments + 1
+ if quadratic then
+ segments[nofsegments] = { control_pt[1], control_pt[2], current_pt[1], current_pt[2], "q" } -- "quadraticto"
+ else
+ local p = segments[nofsegments-1] local n = #p
+ segments[nofsegments] = curveto(control_pt[1],control_pt[2],p[n-2],p[n-1],current_pt[1],current_pt[2])
+ end
+ control_pt = false
+ else
+ nofsegments = nofsegments + 1
+ local halfway_x = (previous_pt[1]+current_pt[1])/2
+ local halfway_y = (previous_pt[2]+current_pt[2])/2
+ if quadratic then
+ segments[nofsegments] = { control_pt[1], control_pt[2], halfway_x, halfway_y, "q" } -- "quadraticto"
+ else
+ local p = segments[nofsegments-1] local n = #p
+ segments[nofsegments] = curveto(control_pt[1],control_pt[2],p[n-2],p[n-1],halfway_x,halfway_y)
+ end
+ control_pt = current_pt
+ end
+ previous_pt = current_pt
+ end
+ if first_pt == last_pt then
+ -- we're already done, probably a simple curve
+ else
+ nofsegments = nofsegments + 1
+ if not control_pt then
+ segments[nofsegments] = { first_pt[1], first_pt[2], "l" } -- "lineto"
+ elseif quadratic then
+ segments[nofsegments] = { control_pt[1], control_pt[2], first_pt[1], first_pt[2], "q" } -- "quadraticto"
+ else
+ local p = last_pt local n = #p
+ segments[nofsegments] = curveto(control_pt[1],control_pt[2],p[n-2],p[n-1],first_pt[1],first_pt[2])
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+-- end of converter
+
+local function readglyph(f,nofcontours)
+ local points = { }
+ local endpoints = { }
+ local instructions = { }
+ local flags = { }
+ for i=1,nofcontours do
+ endpoints[i] = readshort(f) + 1
+ end
+ local nofpoints = endpoints[nofcontours]
+ local nofinstructions = readushort(f)
+ f:seek("set",f:seek()+nofinstructions)
+ -- because flags can repeat we don't know the amount ... in fact this is
+ -- not that efficient (small files but more mem)
+ local i = 1
+ while i <= nofpoints do
+ local flag = readbyte(f)
+ flags[i] = flag
+ if bittest(flag,0x0008) then
+ for j=1,readbyte(f) do
+ i = i + 1
+ flags[i] = flag
+ end
+ end
+ i = i + 1
+ end
+ -- first come the x coordinates, and next the y coordinates and they
+ -- can be repeated
+ local x = 0
+ for i=1,nofpoints do
+ local flag = flags[i]
+ local short = bittest(flag,0x0002)
+ local same = bittest(flag,0x0010)
+ if short then
+ if same then
+ x = x + readbyte(f)
+ else
+ x = x - readbyte(f)
+ end
+ elseif same then
+ -- copy
+ else
+ x = x + readshort(f)
+ end
+ points[i] = { x, 0, bittest(flag,0x0001) } -- y is filled in below
+ end
+ local y = 0
+ for i=1,nofpoints do
+ local flag = flags[i]
+ local short = bittest(flag,0x0004)
+ local same = bittest(flag,0x0020)
+ if short then
+ if same then
+ y = y + readbyte(f)
+ else
+ y = y - readbyte(f)
+ end
+ elseif same then
+ -- copy
+ else
+ y = y + readshort(f)
+ end
+ points[i][2] = y
+ end
+ -- we could integrate this if needed
+ local first = 1
+ for i=1,#endpoints do
+ local last = endpoints[i]
+ endpoints[i] = { unpack(points,first,last) }
+ first = last + 1
+ end
+ return {
+ type = "glyph",
+ -- points = points,
+ contours = endpoints,
+ }
+end
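+
+-- A small helper sketch that spells out the flag bits tested above (not used by
+-- the reader itself):
+--
+-- local function describeflag(flag)
+--     return {
+--         oncurve = bittest(flag,0x0001),
+--         xshort  = bittest(flag,0x0002), -- x stored as one byte
+--         yshort  = bittest(flag,0x0004), -- y stored as one byte
+--         repeats = bittest(flag,0x0008), -- next byte is a repeat count
+--         xsame   = bittest(flag,0x0010), -- positive x when short, else same as before
+--         ysame   = bittest(flag,0x0020), -- positive y when short, else same as before
+--     }
+-- end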
+
+local function readcomposite(f)
+ local components = { }
+ local nofcomponents = 0
+ local instructions = false
+ while true do
+ local flags = readushort(f)
+ local index = readushort(f)
+ ----- f_words = bittest(flags,0x0001)
+ local f_xyarg = bittest(flags,0x0002)
+ ----- f_round = bittest(flags,0x0004+0x0002)
+ ----- f_scale = bittest(flags,0x0008)
+ ----- f_reserved = bittest(flags,0x0010)
+ ----- f_more = bittest(flags,0x0020)
+ ----- f_xyscale = bittest(flags,0x0040)
+ ----- f_matrix = bittest(flags,0x0080)
+ ----- f_instruct = bittest(flags,0x0100)
+ ----- f_usemine = bittest(flags,0x0200)
+ ----- f_overlap = bittest(flags,0x0400)
+ local f_offset = bittest(flags,0x0800)
+ ----- f_uoffset = bittest(flags,0x1000)
+ local xscale = 1
+ local xrotate = 0
+ local yrotate = 0
+ local yscale = 1
+ local xoffset = 0
+ local yoffset = 0
+ local base = false
+ local reference = false
+ if f_xyarg then
+ if bittest(flags,0x0001) then -- f_words
+ xoffset = readshort(f)
+ yoffset = readshort(f)
+ else
+ xoffset = readchar(f) -- signed byte, stupid name
+ yoffset = readchar(f) -- signed byte, stupid name
+ end
+ else
+ if bittest(flags,0x0001) then -- f_words
+ base = readshort(f)
+ reference = readshort(f)
+ else
+ base = readchar(f) -- signed byte, stupid name
+ reference = readchar(f) -- signed byte, stupid name
+ end
+ end
+ if bittest(flags,0x0008) then -- f_scale
+ xscale = read2dot14(f)
+ yscale = xscale
+ if f_xyarg and f_offset then
+ xoffset = xoffset * xscale
+ yoffset = yoffset * yscale
+ end
+ elseif bittest(flags,0x0040) then -- f_xyscale
+ xscale = read2dot14(f)
+ yscale = read2dot14(f)
+ if f_xyarg and f_offset then
+ xoffset = xoffset * xscale
+ yoffset = yoffset * yscale
+ end
+ elseif bittest(flags,0x0080) then -- f_matrix
+ xscale = read2dot14(f)
+ xrotate = read2dot14(f)
+ yrotate = read2dot14(f)
+ yscale = read2dot14(f)
+ if f_xyarg and f_offset then
+ xoffset = xoffset * sqrt(xscale ^2 + xrotate^2)
+ yoffset = yoffset * sqrt(yrotate^2 + yscale ^2)
+ end
+ end
+ nofcomponents = nofcomponents + 1
+ components[nofcomponents] = {
+ index = index,
+ usemine = bittest(flags,0x0200), -- f_usemine
+ round = bittest(flags,0x0006), -- f_round,
+ base = base,
+ reference = reference,
+ matrix = { xscale, xrotate, yrotate, yscale, xoffset, yoffset },
+ }
+ if bittest(flags,0x0100) then
+ instructions = true
+ end
+ if not bittest(flags,0x0020) then -- f_more
+ break
+ end
+ end
+ return {
+ type = "composite",
+ components = components,
+ }
+end
+
+-- function readers.cff(f,offset,glyphs,doshapes) -- false == no shapes (nil or true otherwise)
+
+-- The glyf table depends on the loca table. We have one entry too many in the
+-- locations table (the last one is a dummy) because it lets us calculate the
+-- size of a glyph blob from the delta, although we do not need that in our usage
+-- (yet). We can remove the locations table when we're done (todo: cleanup
+-- finalizer).
+
+function fonts.handlers.otf.readers.loca(f,fontdata,specification)
+ if specification.glyphs then
+ local datatable = fontdata.tables.loca
+ if datatable then
+ -- locations are relative to the glyph data table (glyf)
+ local offset = fontdata.tables.glyf.offset
+ local format = fontdata.fontheader.indextolocformat
+ local locations = { }
+ f:seek("set",datatable.offset)
+ if format == 1 then
+ local nofglyphs = datatable.length/4 - 1 - 1
+ for i=0,nofglyphs do
+ locations[i] = offset + readulong(f)
+ end
+ fontdata.nofglyphs = nofglyphs
+ else
+ local nofglyphs = datatable.length/2 - 1 - 1
+ for i=0,nofglyphs do
+ locations[i] = offset + readushort(f) * 2
+ end
+ fontdata.nofglyphs = nofglyphs
+ end
+ fontdata.locations = locations
+ end
+ end
+end
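+
+-- If we ever need the blob size mentioned above, it follows from the delta
+-- between successive locations; a sketch, assuming the trailing dummy entry is
+-- kept around:
+--
+-- local function glyphsize(fontdata,index)
+--     local locations = fontdata.locations
+--     return locations[index+1] - locations[index]
+-- end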
+
+function fonts.handlers.otf.readers.glyf(f,fontdata,specification) -- part goes to cff module
+ if specification.glyphs then
+ local datatable = fontdata.tables.glyf
+ if datatable then
+ local locations = fontdata.locations
+ if locations then
+ local glyphs = fontdata.glyphs
+ local nofglyphs = fontdata.nofglyphs
+ local filesize = fontdata.filesize
+ local nothing = { 0, 0, 0, 0 }
+ local shapes = { }
+ local loadshapes = specification.shapes
+ for index=0,nofglyphs do
+ local location = locations[index]
+ if location >= filesize then
+ report("discarding %s glyphs due to glyph location bug",nofglyphs-index+1)
+ fontdata.nofglyphs = index - 1
+ fontdata.badfont = true
+ break
+ elseif location > 0 then
+ f:seek("set",location)
+ local nofcontours = readshort(f)
+ glyphs[index].boundingbox = {
+ readshort(f), -- xmin
+ readshort(f), -- ymin
+ readshort(f), -- xmax
+ readshort(f), -- ymax
+ }
+ if not loadshapes then
+ -- save space
+ elseif nofcontours == 0 then
+ shapes[index] = readnothing(f,nofcontours)
+ elseif nofcontours > 0 then
+ shapes[index] = readglyph(f,nofcontours)
+ else
+ shapes[index] = readcomposite(f,nofcontours)
+ end
+ else
+ if loadshapes then
+ shapes[index] = { }
+ end
+ glyphs[index].boundingbox = nothing
+ end
+ end
+ if loadshapes then
+ mergecomposites(glyphs,shapes)
+ contours2outlines(glyphs,shapes)
+ end
+ end
+ end
+ end
+end
diff --git a/tex/context/base/lxml-lpt.lua b/tex/context/base/lxml-lpt.lua
index decb6567b..add29241e 100644
--- a/tex/context/base/lxml-lpt.lua
+++ b/tex/context/base/lxml-lpt.lua
@@ -837,14 +837,12 @@ xml.nodesettostring = nodesettostring
local lpath -- we have a harmless kind of circular reference
-local lshowoptions = { functions = false }
-
local function lshow(parsed)
if type(parsed) == "string" then
parsed = lpath(parsed)
end
report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
- table.serialize(parsed,false,lshowoptions))
+ table.serialize(parsed,false))
end
xml.lshow = lshow
diff --git a/tex/context/base/meta-imp-outlines.mkiv b/tex/context/base/meta-imp-outlines.mkiv
new file mode 100644
index 000000000..d47ec7754
--- /dev/null
+++ b/tex/context/base/meta-imp-outlines.mkiv
@@ -0,0 +1,150 @@
+%D \module
+%D [ file=meta-imp-outlines,
+%D version=2015.06.02,
+%D title=\METAPOST\ Graphics,
+%D subtitle=Outlines,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startluacode
+
+local concat = table.concat
+local formatters = string.formatters
+local validstring = string.valid
+
+local f_setbounds = formatters["setbounds currentpicture to (%s) enlarged %.4G;"]
+local f_index = formatters['draw anchored.bot(textext("\\tttf\\setstrut\\strut index %i") ysized 2bp ,.5[llcorner currentpicture,lrcorner currentpicture] shifted (0,%.4G));']
+local f_unicode = formatters['draw anchored.bot(textext("\\tttf\\setstrut\\strut unicode %05X") ysized 2bp ,.5[llcorner currentpicture,lrcorner currentpicture] shifted (0,%.4G));']
+
+local f_in_red = formatters["draw %s withpen pencircle scaled .15 withcolor .5red;"]
+local f_in_green = formatters["draw %s withpen pencircle scaled .15 withcolor .5green;"]
+local f_in_blue = formatters["draw %s withpen pencircle scaled .15 withcolor .5blue;"]
+local f_in_gray = formatters["draw image(%s) withcolor .75yellow;"]
+
+local f_glyph = formatters [ [[
+pickup pencircle scaled .15;
+pointlabelfont := "Mono sa .125";
+pointlabelscale := 1bp ;
+drawoptionsfactor := .2bp ;
+originlength := 2bp ;
+%s;
+]] ]
+
+local metapost = fonts.metapost
+
+local variables = interfaces.variables
+
+local v_all = variables.all
+local v_page = variables.page
+local v_text = variables.text
+local v_command = variables.command
+
+function metapost.showglyph(specification)
+ local fontid = font.current()
+ local shapedata = fonts.hashes.shapes[fontid] -- by index
+ local chardata = fonts.hashes.characters[fontid] -- by unicode
+ local shapeglyphs = shapedata.glyphs
+ local character = validstring(specification.character)
+ local index = validstring(specification.index)
+ local alternative = validstring(specification.alternative)
+ local command = validstring(specification.command)
+
+ local function shape(index,what,f_comment)
+ if not index then
+ return
+ end
+ local glyph = shapeglyphs[index]
+ if glyph and (glyph.segments or glyph.sequence) then
+ local units = shapedata.units or 1000
+ local factor = 100/units
+ local paths = metapost.paths(glyph,factor)
+ if #paths > 0 then
+ local graphic = f_glyph(concat{
+ f_in_gray(metapost.fill(paths)),
+ metapost.draw(paths,true),
+ f_in_red(metapost.boundingbox(glyph,factor)),
+ f_in_green(metapost.widthline(glyph,factor)),
+ f_in_blue(metapost.zeroline(glyph,factor)),
+ f_setbounds(metapost.maxbounds(shapedata,index,factor),offset or 1),
+ f_comment(what,1)
+ })
+ if alternative == v_page then
+ context.startMPpage()
+ context(graphic)
+ context.stopMPpage()
+ elseif alternative == v_command then
+ context[command](graphic)
+ else -- v_text
+ context.startMPcode()
+ context(graphic)
+ context.stopMPcode()
+ end
+ end
+ end
+ end
+
+ if character == v_all then
+ for u, c in table.sortedhash(chardata) do
+ shape(c.index,u,f_unicode)
+ end
+ return
+ end
+ if type(character) == "string" then
+ character = utf.byte(character)
+ end
+ if type(character) == "number" then
+ local c = chardata[character]
+ if c then
+ shape(c.index,c.index,f_index)
+ end
+ return
+ end
+ if type(index) == "number" then
+ shape(index,index,f_index)
+ return
+ end
+ for index=1,#shapeglyphs do
+ shape(index,index,f_index)
+ end
+end
+
+\stopluacode
+
+\unprotect
+
+\unexpanded\def\showshape
+ {\dosingleargument\meta_shapes_show}
+
+\def\meta_shapes_show[#1]%
+ {\begingroup
+ \getdummyparameters[\c!alternative=\v!text,#1]%
+ \ctxlua{fonts.metapost.showglyph{
+ character = "\dummyparameter\c!character",
+ index = "\dummyparameter\c!index",
+ alternative = "\dummyparameter\c!alternative",
+ command = "\dummyparameter\c!command",
+ }}%
+ \endgroup}
+
+\protect
+
+\continueifinputfile{meta-imp-outlines.mkiv}
+
+\starttext
+
+\setupbodyfont[pagella]
+
+\showshape[character=all,alternative=page]
+
+% \setupbodyfont[dejavu]
+% \showshape[character=P,alternative=text]
+
+% \definedfont[almfixed]
+% \showshape[character=all,alternative=page]
+
+\stoptext
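The showglyph function above accepts either a character (resolved through fonts.hashes.characters and utf.byte) or a raw glyph index, and only then asks fonts.metapost for the paths. A minimal standalone sketch of that character-to-index resolution follows; the chardata table and the resolve_index helper are made up for illustration, and utf8.codepoint stands in for ConTeXt's utf.byte:

-- chardata mimics fonts.hashes.characters[fontid]: unicode -> { index = ... }
-- (sample values only, not taken from a real font)
local chardata = {
    [0x0050] = { index = 51 }, -- "P"
    [0x0051] = { index = 52 }, -- "Q"
}

-- hypothetical helper mirroring the dispatch in metapost.showglyph
local function resolve_index(character)
    if type(character) == "string" then
        character = utf8.codepoint(character)
    end
    local c = chardata[character]
    return c and c.index
end

print(resolve_index("P"))    -- 51
print(resolve_index(0x0051)) -- 52

Inside ConTeXt the same lookup keys directly into the hash of the current font, so \showshape[character=P] ends up at the glyph index under which the shape data is stored.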
diff --git a/tex/context/base/mult-def.mkiv b/tex/context/base/mult-def.mkiv
index d547a7b81..bad77eac2 100644
--- a/tex/context/base/mult-def.mkiv
+++ b/tex/context/base/mult-def.mkiv
@@ -72,6 +72,9 @@
\def\c!keeptogether {keeptogether}
\def\c!viewerprefix {viewerprefix}
+\def\c!index {index} % not a register but a number (of a glyph)
+\def\c!character {character}
+
\def\v!display {display}
\def\v!inline {inline}
diff --git a/tex/context/base/publ-ini.lua b/tex/context/base/publ-ini.lua
index 3791bd532..368b8c6ff 100644
--- a/tex/context/base/publ-ini.lua
+++ b/tex/context/base/publ-ini.lua
@@ -2336,7 +2336,7 @@ do
marked_dataset = dataset
marked_list = list
marked_method = method
- -- btxflushmarked() -- here (could also be done in caller)
+ btxflushmarked() -- here (could also be done in caller)
else
marked_todo = false
end
@@ -2502,6 +2502,9 @@ do
--
local found, todo, list = findallused(dataset,reference,internal,method == v_text or method == v_always) -- also when not in list
--
+-- inspect(found)
+-- inspect(todo)
+-- inspect(list)
if not found or #found == 0 then
report("no entry %a found in dataset %a",reference,dataset)
elseif not setup then
@@ -2629,7 +2632,7 @@ do
marked_dataset = dataset
marked_list = list
marked_method = method
- -- btxflushmarked() -- here (could also be done in caller)
+ btxflushmarked() -- here (could also be done in caller)
else
marked_todo = false
end
diff --git a/tex/context/base/publ-ini.mkiv b/tex/context/base/publ-ini.mkiv
index fd9e4ad97..fb61788be 100644
--- a/tex/context/base/publ-ini.mkiv
+++ b/tex/context/base/publ-ini.mkiv
@@ -1315,7 +1315,7 @@
after {\p_publ_cite_after}%
\relax
\iftrialtypesetting\else
- \clf_btxflushmarked
+ %\clf_btxflushmarked
\fi}
\let\dobtxcitevariantblob\publ_cite_handle_variant_blob % command can use it via lua
@@ -1369,7 +1369,7 @@
dataset {\currentbtxdataset}%
reference {\currentbtxreference}%
\relax
- \clf_btxflushmarked
+ %\clf_btxflushmarked
\endgroup
\fi}
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 0349ed89b..0f9551bc7 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 72620604e..f405b93ea 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/util-tab.lua b/tex/context/base/util-tab.lua
index 0ab388826..d6f3d6731 100644
--- a/tex/context/base/util-tab.lua
+++ b/tex/context/base/util-tab.lua
@@ -557,7 +557,7 @@ local f_table_finish = formatters["}"]
local spaces = utilities.strings.newrepeater(" ")
-local serialize = table.serialize -- the extensive one, the one we started with
+local original_serialize = table.serialize -- the extensive one, the one we started with
-- there is still room for optimization: index run, key run, but i need to check with the
-- latest lua for the value of #n (with holes) .. anyway for tracing purposes we want
@@ -566,7 +566,7 @@ local serialize = table.serialize -- the extensive one, the one we started with
local function serialize(root,name,specification)
if type(specification) == "table" then
- return serialize(root,name,specification) -- the original one
+ return original_serialize(root,name,specification) -- the original one
end
local t -- = { }
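The rename above is the whole fix: a local function declaration is already in scope inside its own body, so the old code's inner serialize call recursed into itself instead of reaching the saved table.serialize. A standalone sketch of the pitfall and of the renamed-upvalue fix, using made-up upper names in place of table.serialize:

-- before the patch (schematically): the saved original is shadowed,
-- and the inner call recurses into the new function instead
local upper = string.upper
local function upper(s)      -- shadows the line above
    return upper(s)          -- refers to this very function: infinite recursion
end

-- after the patch: save the original under a distinct name and call that
local original_upper = string.upper
local function upper_fixed(s)
    return original_upper(s) -- reaches string.upper
end

print(upper_fixed("ok"))     -- OK
-- calling upper("ok") would overflow the stack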
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index dee3ebec7..2d39df62b 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 06/12/15 10:06:12
+-- merge date : 06/13/15 09:52:29
do -- begin closure to overcome local limits and interference
@@ -4380,6 +4380,7 @@ function constructors.scale(tfmdata,specification)
local hdelta=delta
local vdelta=delta
target.designsize=parameters.designsize
+ target.units=units
target.units_per_em=units
local direction=properties.direction or tfmdata.direction or 0
target.direction=direction
@@ -4781,11 +4782,20 @@ function constructors.finalize(tfmdata)
if not parameters.slantfactor then
parameters.slantfactor=tfmdata.slant or 0
end
- if not parameters.designsize then
- parameters.designsize=tfmdata.designsize or (factors.pt*10)
+ local designsize=parameters.designsize
+ if designsize then
+ parameters.minsize=tfmdata.minsize or designsize
+ parameters.maxsize=tfmdata.maxsize or designsize
+ else
+ designsize=factors.pt*10
+ parameters.designsize=designsize
+ parameters.minsize=designsize
+ parameters.maxsize=designsize
end
+ parameters.minsize=tfmdata.minsize or parameters.designsize
+ parameters.maxsize=tfmdata.maxsize or parameters.designsize
if not parameters.units then
- parameters.units=tfmdata.units_per_em or 1000
+ parameters.units=tfmdata.units or tfmdata.units_per_em or 1000
end
if not tfmdata.descriptions then
local descriptions={}
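With this change every finalized font gets explicit minsize and maxsize parameters: they default to the design size, and when no design size is known all three fall back to 10pt. A standalone sketch of that fallback chain, assuming factors.pt is the usual 65536 scaled points per point; the finalize_sizes helper and the sample values are made up:

-- fallback chain as in constructors.finalize: designsize -> 10pt default,
-- minsize/maxsize -> explicit values -> designsize
local factors = { pt = 65536 } -- scaled points per point, as in TeX

local function finalize_sizes(tfmdata, parameters)
    local designsize = parameters.designsize
    if designsize then
        parameters.minsize = tfmdata.minsize or designsize
        parameters.maxsize = tfmdata.maxsize or designsize
    else
        designsize            = factors.pt * 10 -- default to 10pt
        parameters.designsize = designsize
        parameters.minsize    = designsize
        parameters.maxsize    = designsize
    end
    return parameters
end

-- made-up case: a font that only declares a design size of 11pt
local p = finalize_sizes({}, { designsize = 11 * 65536 })
print(p.designsize, p.minsize, p.maxsize) -- 720896  720896  720896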
@@ -4848,6 +4858,7 @@ function constructors.finalize(tfmdata)
tfmdata.auto_protrude=nil
tfmdata.extend=nil
tfmdata.slant=nil
+ tfmdata.units=nil
tfmdata.units_per_em=nil
tfmdata.cache=nil
properties.finalized=true
@@ -6086,7 +6097,7 @@ local keys={}
function keys.FontName (data,line) data.metadata.fontname=strip (line)
data.metadata.fullname=strip (line) end
function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end
-function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch=toboolean(line,true) end
+function keys.IsFixedPitch(data,line) data.metadata.monospaced=toboolean(line,true) end
function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end
function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end
function keys.Descender (data,line) data.metadata.descender=tonumber (line) end
@@ -6508,7 +6519,7 @@ local function copytotfm(data)
local emdash=0x2014
local spacer="space"
local spaceunits=500
- local monospaced=metadata.isfixedpitch
+ local monospaced=metadata.monospaced
local charwidth=metadata.charwidth
local italicangle=metadata.italicangle
local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight
@@ -7195,7 +7206,7 @@ local report_otf=logs.reporter("fonts","otf loading")
local fonts=fonts
local otf=fonts.handlers.otf
otf.glists={ "gsub","gpos" }
-otf.version=2.814
+otf.version=2.815
otf.cache=containers.define("fonts","otf",otf.version,true)
local hashes=fonts.hashes
local definers=fonts.definers
@@ -9165,9 +9176,13 @@ local function copytotfm(data,cache_id)
local spaceunits=500
local spacer="space"
local designsize=metadata.designsize or metadata.design_size or 100
+ local minsize=metadata.minsize or metadata.design_range_bottom or designsize
+ local maxsize=metadata.maxsize or metadata.design_range_top or designsize
local mathspecs=metadata.math
if designsize==0 then
designsize=100
+ minsize=100
+ maxsize=100
end
if mathspecs then
for name,value in next,mathspecs do
@@ -9227,13 +9242,13 @@ local function copytotfm(data,cache_id)
local fontname=metadata.fontname
local fullname=metadata.fullname or fontname
local psname=fontname or fullname
- local units=metadata.units_per_em or 1000
+ local units=metadata.units or metadata.units_per_em or 1000
if units==0 then
units=1000
- metadata.units_per_em=1000
+ metadata.units=1000
report_otf("changing %a units to %a",0,units)
end
- local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced")
+ local monospaced=metadata.monospaced or metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced")
local charwidth=pfminfo.avgwidth
local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight
local italicangle=metadata.italicangle
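Two spellings of the fixed-pitch flag now coexist: the AFM reader stores monospaced, while older cached metadata may still say isfixedpitch, so the line above reads both before falling back to the PANOSE proportion. A tiny standalone sketch of that compatibility read; the is_monospaced helper and the sample tables are made up:

-- prefer the new field name, fall back to the old one, then to PANOSE
local function is_monospaced(metadata, pfminfo)
    return metadata.monospaced
        or metadata.isfixedpitch
        or (pfminfo and pfminfo.panose and pfminfo.panose.proportion == "Monospaced")
        or false
end

print(is_monospaced({ monospaced   = true }))                        -- true  (new field)
print(is_monospaced({ isfixedpitch = true }))                        -- true  (old cached field)
print(is_monospaced({}, { panose = { proportion = "Monospaced" } })) -- true
print(is_monospaced({}))                                             -- false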
@@ -9298,8 +9313,10 @@ local function copytotfm(data,cache_id)
end
end
parameters.designsize=(designsize/10)*65536
- parameters.ascender=abs(metadata.ascent or 0)
- parameters.descender=abs(metadata.descent or 0)
+ parameters.minsize=(minsize/10)*65536
+ parameters.maxsize=(maxsize/10)*65536
+ parameters.ascender=abs(metadata.ascender or metadata.ascent or 0)
+ parameters.descender=abs(metadata.descender or metadata.descent or 0)
parameters.units=units
properties.space=spacer
properties.encodingbytes=2
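The new minsize and maxsize parameters are converted exactly like the design size: the values arrive in decipoints (the unit used by the OpenType size feature, hence the fallback of 100 for 10pt), so dividing by 10 gives points and multiplying by 65536 gives TeX scaled points. A quick check of that arithmetic; the range values below are made up:

-- decipoints -> points -> scaled points, as in copytotfm above
local function decipoints_to_sp(d)
    return (d / 10) * 65536
end

print(decipoints_to_sp(100)) -- 655360 sp = 10pt, the fallback design size
print(decipoints_to_sp(90))  -- 589824 sp =  9pt, a made-up design_range_bottom
print(decipoints_to_sp(120)) -- 786432 sp = 12pt, a made-up design_range_top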