author     Context Git Mirror Bot <phg42.2a@gmail.com>   2016-08-15 23:17:11 +0200
committer  Context Git Mirror Bot <phg42.2a@gmail.com>   2016-08-15 23:17:11 +0200
commit     30b3a925bfc1857a31e23d9b17b8da0be572d02a (patch)
tree       994685218d0ab6c1c65df36dcc5a5a08a231171e /tex/context/base/mkiv
parent     01440ae8999ee20351f0538792e415ade8cd3d5c (diff)
download   context-30b3a925bfc1857a31e23d9b17b8da0be572d02a.tar.gz
2016-08-15 22:45:00
Diffstat (limited to 'tex/context/base/mkiv')
-rw-r--r--  tex/context/base/mkiv/cont-new.mkiv           |    2
-rw-r--r--  tex/context/base/mkiv/context.mkiv            |    2
-rw-r--r--  tex/context/base/mkiv/font-cff.lua            |  230
-rw-r--r--  tex/context/base/mkiv/font-lib.mkvi           |    4
-rw-r--r--  tex/context/base/mkiv/font-onr.lua            |  197
-rw-r--r--  tex/context/base/mkiv/font-otr.lua            |  181
-rw-r--r--  tex/context/base/mkiv/font-ots.lua            |   10
-rw-r--r--  tex/context/base/mkiv/font-shp.lua            |  193
-rw-r--r--  tex/context/base/mkiv/luat-mac.lua            |    5
-rw-r--r--  tex/context/base/mkiv/math-map.lua            |    8
-rw-r--r--  tex/context/base/mkiv/meta-imp-outlines.mkiv  |    4
-rw-r--r--  tex/context/base/mkiv/meta-tex.lua            |    4
-rw-r--r--  tex/context/base/mkiv/mlib-lua.lua            |    1
-rw-r--r--  tex/context/base/mkiv/mlib-pps.lua            |    9
-rw-r--r--  tex/context/base/mkiv/mtx-context-xml.tex     |   15
-rw-r--r--  tex/context/base/mkiv/mult-fun.lua            |    8
-rw-r--r--  tex/context/base/mkiv/node-ini.lua            |   66
-rw-r--r--  tex/context/base/mkiv/status-files.pdf        |  bin 9132 -> 9240 bytes
-rw-r--r--  tex/context/base/mkiv/status-lua.pdf          |  bin 368689 -> 368827 bytes
-rw-r--r--  tex/context/base/mkiv/trac-inf.lua            |   15
20 files changed, 618 insertions, 336 deletions
diff --git a/tex/context/base/mkiv/cont-new.mkiv b/tex/context/base/mkiv/cont-new.mkiv
index eab508e59..11508c4c1 100644
--- a/tex/context/base/mkiv/cont-new.mkiv
+++ b/tex/context/base/mkiv/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2016.08.11 13:56}
+\newcontextversion{2016.08.15 22:40}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/mkiv/context.mkiv b/tex/context/base/mkiv/context.mkiv
index dd2143252..fc4b3427a 100644
--- a/tex/context/base/mkiv/context.mkiv
+++ b/tex/context/base/mkiv/context.mkiv
@@ -39,7 +39,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2016.08.11 13:56}
+\edef\contextversion{2016.08.15 22:40}
\edef\contextkind {beta}
%D For those who want to use this:
diff --git a/tex/context/base/mkiv/font-cff.lua b/tex/context/base/mkiv/font-cff.lua
index 8c57b473e..8f33c76c4 100644
--- a/tex/context/base/mkiv/font-cff.lua
+++ b/tex/context/base/mkiv/font-cff.lua
@@ -26,6 +26,7 @@ local concat, remove = table.concat, table.remove
local floor, abs, round, ceil = math.floor, math.abs, math.round, math.ceil
local P, C, R, S, C, Cs, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Ct
local lpegmatch = lpeg.match
+local formatters = string.formatters
local readers = fonts.handlers.otf.readers
local streamreader = readers.streamreader
@@ -594,6 +595,7 @@ do
local ymax = 0
local checked = false
local keepcurve = false
+ local version = 2
local function showstate(where)
report("%w%-10s : [%s] n=%i",depth*2,where,concat(stack," ",1,top),top)
@@ -1052,13 +1054,114 @@ do
end
end
- local function unsupported()
+ local function unsupported(t)
if trace_charstrings then
- showstate("unsupported")
+ showstate("unsupported " .. t)
end
top = 0
end
+ local function unsupportedsub(t)
+ if trace_charstrings then
+ showstate("unsupported sub " .. t)
+ end
+ top = 0
+ end
+
+ -- type 1 (not used in type 2)
+
+ local function getstem3()
+ if trace_charstrings then
+ showstate("stem3")
+ end
+ top = 0
+ end
+
+ local function divide()
+ if version == 1 then
+ local d = stack[top]
+ top = top - 1
+ stack[top] = stack[top] / d
+ end
+ end
+
+ local function closepath()
+ if version == 1 then
+ if trace_charstrings then
+ showstate("closepath")
+ end
+ end
+ top = 0
+ end
+
+ local function hsbw()
+ if version == 1 then
+ if trace_charstrings then
+ showstate("dotsection")
+ end
+ width = stack[top]
+ end
+ top = 0
+ end
+
+ local function seac()
+ if version == 1 then
+ if trace_charstrings then
+ showstate("seac")
+ end
+ end
+ top = 0
+ end
+
+ local function sbw()
+ if version == 1 then
+ if trace_charstrings then
+ showstate("sbw")
+ end
+ width = stack[top-1]
+ end
+ top = 0
+ end
+
+ -- these are probably used for special cases i.e. call out to postscript
+
+ local function callothersubr()
+ if version == 1 then
+ -- we don't support this (ok, we could mimic these othersubrs)
+ if trace_charstrings then
+ showstate("callothersubr (unsupported)")
+ end
+ end
+ top = 0
+ end
+
+ local function pop()
+ if version == 1 then
+ -- we don't support this
+ if trace_charstrings then
+ showstate("pop (unsupported)")
+ end
+ top = top + 1
+ stack[top] = 0 -- a dummy
+ else
+ top = 0
+ end
+ end
+
+ local function setcurrentpoint()
+ if version == 1 then
+ -- we don't support this
+ if trace_charstrings then
+ showstate("pop (unsupported)")
+ end
+ x = x + stack[top-1]
+ y = y + stack[top]
+ end
+ top = 0
+ end
+
+ -- so far for unsupported postscript
+
-- Bah, we cannot use a fast lpeg because a hint has an unknown size and a
-- runtime capture cannot handle that well.
@@ -1076,7 +1179,7 @@ do
unsupported, -- 10 -- calllocal,
unsupported, -- 11 -- callreturn,
unsupported, -- 12 -- elsewhere
- unsupported, -- 13 -- hsbw
+ hsbw, -- 13 -- hsbw (type 1 cff)
unsupported, -- 14 -- endchar,
unsupported, -- 15
unsupported, -- 16
@@ -1098,6 +1201,17 @@ do
}
local subactions = {
+ -- cff 1
+ [000] = dotsection,
+ [001] = getstem3,
+ [002] = getstem3,
+ [006] = seac,
+ [007] = sbw,
+ [012] = divide,
+ [016] = callothersubr,
+ [017] = pop,
+ [033] = setcurrentpoint,
+ -- cff 2
[034] = hflex,
[035] = flex,
[036] = hflex1,
@@ -1107,23 +1221,29 @@ do
local p_bytes = Ct((P(1)/byte)^0)
local function call(scope,list,bias,process)
- local index = stack[top] + bias
- top = top - 1
- if trace_charstrings then
- showvalue(scope,index,true)
- end
- local str = list[index]
- if str then
- if type(str) == "string" then
- str = lpegmatch(p_bytes,str)
- list[index] = str
- end
- depth = depth + 1
- process(str)
- depth = depth - 1
+ depth = depth + 1
+ if top == 0 then
+ showstate(formatters["unknown %s call"](scope))
+ top = 0
else
- report("unknown %s %i",scope,index)
+ local index = stack[top] + bias
+ top = top - 1
+ if trace_charstrings then
+ showvalue(scope,index,true)
+ end
+ local tab = list[index]
+ if tab then
+ if type(tab) == "string" then
+ tab = lpegmatch(p_bytes,tab)
+ list[index] = tab
+ end
+ process(tab)
+ else
+ showstate(formatters["unknown %s call %i"](scope,index))
+ top = 0
+ end
end
+ depth = depth - 1
end
local function process(tab)
@@ -1131,7 +1251,7 @@ do
local n = #tab
while i <= n do
local t = tab[i]
- if t >= 32 and t<=246 then
+ if t >= 32 and t <= 246 then
-- -107 .. +107
top = top + 1
stack[top] = t - 139
@@ -1196,7 +1316,7 @@ do
local t = tab[i]
local a = subactions[t]
if a then
- a()
+ a(t)
else
if trace_charstrings then
showvalue("<subaction>",t)
@@ -1207,7 +1327,7 @@ do
else
local a = actions[t]
if a then
- local s = a()
+ local s = a(t)
if s then
i = i + s
end
@@ -1260,27 +1380,44 @@ do
-- end
-- end
- parsecharstrings = function(data,glyphs,doshapes)
+ local function setbias(globals,locals)
+ if version == 1 then
+ return
+ false,
+ false
+ else
+ local g, l = #globals, #locals
+ return
+ ((g < 1240 and 107) or (g < 33900 and 1131) or 32768) + 1,
+ ((l < 1240 and 107) or (l < 33900 and 1131) or 32768) + 1
+ end
+ end
+
+ parsecharstrings = function(data,glyphs,doshapes,tversion)
-- for all charstrings
local dictionary = data.dictionaries[1]
local charstrings = dictionary.charstrings
local charset = dictionary.charset
+ local private = dictionary.private or { data = { } }
+
keepcurve = doshapes
+ version = tversion
stack = { }
glyphs = glyphs or { }
strings = data.strings
- locals = dictionary.subroutines
- globals = data.routines
- globalbias = #globals
- localbias = #locals
- globalbias = ((globalbias < 1240 and 107) or (globalbias < 33900 and 1131) or 32768) + 1
- localbias = ((localbias < 1240 and 107) or (localbias < 33900 and 1131) or 32768) + 1
- local nominalwidth = dictionary.private.data.nominalwidthx or 0
- local defaultwidth = dictionary.private.data.defaultwidthx or 0
+ globals = data.routines or { }
+ locals = dictionary.subroutines or { }
+
+ globalbias, localbias = setbias(globals,locals,version)
+
+ local nominalwidth = private.data.nominalwidthx or 0
+ local defaultwidth = private.data.defaultwidthx or 0
for i=1,#charstrings do
- local str = charstrings[i]
- local tab = lpegmatch(p_bytes,str)
+ local tab = charstrings[i]
+ if type(tab) == "string" then
+ tab = lpegmatch(p_bytes,tab)
+ end
local index = i - 1
x = 0
y = 0
@@ -1341,20 +1478,22 @@ do
return glyphs
end
- parsecharstring = function(data,dictionary,charstring,glyphs,index,doshapes)
+ parsecharstring = function(data,dictionary,tab,glyphs,index,doshapes,version)
local private = dictionary.private
keepcurve = doshapes
strings = data.strings -- or in dict?
locals = dictionary.subroutines or { }
globals = data.routines or { }
- globalbias = #globals
- localbias = #locals
- globalbias = ((globalbias < 1240 and 107) or (globalbias < 33900 and 1131) or 32768) + 1
- localbias = ((localbias < 1240 and 107) or (localbias < 33900 and 1131) or 32768) + 1
+
+ globalbias, localbias = setbias(globals,locals)
+
local nominalwidth = private and private.data.nominalwidthx or 0
local defaultwidth = private and private.data.defaultwidthx or 0
--
- local tab = lpegmatch(p_bytes,charstring)
+ if type(tab) == "string" then
+ tab = lpegmatch(p_bytes,tab)
+ end
+ --
x = 0
y = 0
width = false
@@ -1384,7 +1523,8 @@ do
width = nominalwidth + width
end
--
-index = index - 1
+ index = index - 1
+ --
local glyph = glyphs[index] -- can be autodefined in otr
if not glyph then
glyphs[index] = {
@@ -1410,8 +1550,6 @@ index = index - 1
report("width: %s",tostring(width))
report("boundingbox: % t",boundingbox)
end
- --
- return charstring
end
resetcharstrings = function()
@@ -1542,7 +1680,7 @@ local function readcidprivates(f,data)
parseprivates(data,dictionaries)
end
-local function readnoselect(f,data,glyphs,doshapes)
+local function readnoselect(f,data,glyphs,doshapes,version)
local dictionaries = data.dictionaries
local dictionary = dictionaries[1]
readglobals(f,data)
@@ -1552,11 +1690,13 @@ local function readnoselect(f,data,glyphs,doshapes)
readprivates(f,data)
parseprivates(data,data.dictionaries)
readlocals(f,data,dictionary)
- parsecharstrings(data,glyphs,doshapes)
+ parsecharstrings(data,glyphs,doshapes,version)
resetcharstrings()
end
-local function readfdselect(f,data,glyphs,doshapes)
+readers.parsecharstrings = parsecharstrings
+
+local function readfdselect(f,data,glyphs,doshapes,version)
local header = data.header
local dictionaries = data.dictionaries
local dictionary = dictionaries[1]
@@ -1615,7 +1755,7 @@ local function readfdselect(f,data,glyphs,doshapes)
readlocals(f,data,dictionaries[i])
end
for i=1,#charstrings do
- parsecharstring(data,dictionaries[fdindex[i]+1],charstrings[i],glyphs,i,doshapes)
+ parsecharstring(data,dictionaries[fdindex[i]+1],charstrings[i],glyphs,i,doshapes,version)
end
resetcharstrings()
end
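
Note on the setbias helper added above: Type 2 charstrings bias subroutine indices by 107, 1131 or 32768 depending on how many subroutines there are (Type 1, i.e. version 1, uses no bias, hence the false pair), and the extra +1 compensates for Lua's one-based tables. A minimal standalone sketch of the same rule:

local function bias(count)
    return ((count < 1240 and 107) or (count < 33900 and 1131) or 32768) + 1
end

print(bias(80))    -- 108
print(bias(2000))  -- 1132
print(bias(40000)) -- 32769
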
diff --git a/tex/context/base/mkiv/font-lib.mkvi b/tex/context/base/mkiv/font-lib.mkvi
index 6b891542c..ed7d896c5 100644
--- a/tex/context/base/mkiv/font-lib.mkvi
+++ b/tex/context/base/mkiv/font-lib.mkvi
@@ -61,6 +61,10 @@
%registerctxluafile{font-afm}{1.001}
\registerctxluafile{font-afk}{1.001}
+% shapes
+
+\registerctxluafile{font-shp}{1.001}
+
% tfm
\registerctxluafile{font-tfm}{1.001}
diff --git a/tex/context/base/mkiv/font-onr.lua b/tex/context/base/mkiv/font-onr.lua
index dcf7445e5..6c33b24c6 100644
--- a/tex/context/base/mkiv/font-onr.lua
+++ b/tex/context/base/mkiv/font-onr.lua
@@ -50,31 +50,41 @@ and <l n='otf'/> reader.</p>
and new vectors (we actually had one bad vector with the old loader).</p>
--ldx]]--
-local get_indexes
+local get_indexes, get_shapes
do
- local n, m
+ local decrypt
- local progress = function(str,position,name,size)
- local forward = position + tonumber(size) + 3 + 2
- n = n + 1
- if n >= m then
- return #str, name
- elseif forward < #str then
- return forward, name
- else
- return #str, name
+ do
+
+ local r, c1, c2, n = 0, 0, 0, 0
+
+ local function step(c)
+ local cipher = byte(c)
+ local plain = bxor(cipher,rshift(r,8))
+ r = ((cipher + r) * c1 + c2) % 65536
+ return char(plain)
end
- end
- local initialize = function(str,position,size)
- n = 0
- m = size -- % tonumber(size)
- return position + 1
+ decrypt = function(binary,initial,seed)
+ r, c1, c2, n = initial, 52845, 22719, seed
+ binary = gsub(binary,".",step)
+ return sub(binary,n+1)
+ end
+
+ -- local pattern = Cs((P(1) / step)^1)
+ --
+ -- decrypt = function(binary,initial,seed)
+ -- r, c1, c2, n = initial, 52845, 22719, seed
+ -- binary = lpegmatch(pattern,binary)
+ -- return sub(binary,n+1)
+ -- end
+
end
local charstrings = P("/CharStrings")
+ local subroutines = P("/Subrs")
local encoding = P("/Encoding")
local dup = P("dup")
local put = P("put")
@@ -85,9 +95,64 @@ do
local spaces = P(" ")^1
local spacing = patterns.whitespace^0
+ local routines, vector, chars, n, m
+
+ local initialize = function(str,position,size)
+ n = 0
+ m = size -- % tonumber(size)
+ return position + 1
+ end
+
+ local setroutine = function(str,position,index,size)
+ local forward = position + tonumber(size)
+ local stream = sub(str,position+1,forward)
+ routines[index] = decrypt(stream,4330,4)
+ return forward
+ end
+
+ local setvector = function(str,position,name,size)
+ local forward = position + tonumber(size)
+ if n >= m then
+ return #str
+ elseif forward < #str then
+ vector[n] = name
+ n = n + 1 -- we compensate for notdef at the cff loader end
+ return forward
+ else
+ return #str
+ end
+ end
+
+ local setshapes = function(str,position,name,size)
+ local forward = position + tonumber(size)
+ local stream = sub(str,position+1,forward)
+ if n > m then
+ return #str
+ elseif forward < #str then
+ vector[n] = name
+ n = n + 1
+ chars [n] = decrypt(stream,4330,4)
+ return forward
+ else
+ return #str
+ end
+ end
+
+ local p_rd = spacing * (P("RD") + P("-|"))
+ local p_np = spacing * (P("NP") + P( "|"))
+ local p_nd = spacing * (P("ND") + P( "|"))
+
+ local p_filterroutines = -- dup <i> <n> RD or -| <n encrypted bytes> NP or |
+ (1-subroutines)^0 * subroutines * spaces * Cmt(cardinal,initialize)
+ * (Cmt(cardinal * spaces * cardinal * p_rd, setroutine) * p_np + P(1))^1
+
+ local p_filtershapes = -- /foo <n> RD <n encrypted bytes> ND
+ (1-charstrings)^0 * charstrings * spaces * Cmt(cardinal,initialize)
+ * (Cmt(name * spaces * cardinal * p_rd, setshapes) * p_nd + P(1))^1
+
local p_filternames = Ct (
(1-charstrings)^0 * charstrings * spaces * Cmt(cardinal,initialize)
- * (Cmt(name * spaces * cardinal, progress) + P(1))^1
+ * (Cmt(name * spaces * cardinal, setvector) + P(1))^1
)
-- /Encoding 256 array
@@ -102,36 +167,7 @@ do
-- if one of first 4 not 0-9A-F then binary else hex
- local decrypt
-
- do
-
- local r, c1, c2, n = 0, 0, 0, 0
-
- local function step(c)
- local cipher = byte(c)
- local plain = bxor(cipher,rshift(r,8))
- r = ((cipher + r) * c1 + c2) % 65536
- return char(plain)
- end
-
- decrypt = function(binary)
- r, c1, c2, n = 55665, 52845, 22719, 4
- binary = gsub(binary,".",step)
- return sub(binary,n+1)
- end
-
- -- local pattern = Cs((P(1) / step)^1)
- --
- -- decrypt = function(binary)
- -- r, c1, c2, n = 55665, 52845, 22719, 4
- -- binary = lpegmatch(pattern,binary)
- -- return sub(binary,n+1)
- -- end
-
- end
-
- local function loadpfbvector(filename)
+ local function loadpfbvector(filename,shapestoo)
-- for the moment limited to encoding only
local data = io.loaddata(resolvers.findfile(filename))
@@ -153,28 +189,36 @@ do
return
end
- binary = decrypt(binary,4)
+ binary = decrypt(binary,55665,4)
- local vector = lpegmatch(p_filternames,binary)
-
--- if vector[1] == ".notdef" then
--- -- tricky
--- vector[0] = table.remove(vector,1)
--- end
-
- for i=1,#vector do
- vector[i-1] = vector[i]
+ local names = { }
+ local encoding = lpegmatch(p_filterencoding,ascii)
+ local glyphs = { }
+
+ routines, vector, chars = { }, { }, { }
+
+ if shapestoo then
+ lpegmatch(p_filterroutines,binary)
+ lpegmatch(p_filtershapes,binary)
+ local data = {
+ dictionaries = {
+ {
+ charstrings = chars,
+ charset = vector,
+ subroutines = routines,
+ }
+ },
+ }
+ fonts.handlers.otf.readers.parsecharstrings(data,glyphs,true,true)
+ else
+ lpegmatch(p_filternames,binary)
end
- vector[#vector] = nil
- if not vector then
- report_pfb("no vector in %a",filename)
- return
- end
+ names = vector
- local encoding = lpegmatch(p_filterencoding,ascii)
+ routines, vector, chars = nil, nil, nil
- return vector, encoding
+ return names, encoding, glyphs
end
@@ -202,6 +246,11 @@ do
end
end
+ get_shapes = function(pfbname)
+ local vector, encoding, glyphs = loadpfbvector(pfbname,true)
+ return glyphs
+ end
+
end
--[[ldx--
@@ -428,6 +477,26 @@ function readers.loadfont(afmname,pfbname)
end
end
+-- for now, todo: n and check with otf (no afm needed here)
+
+function readers.loadshapes(filename)
+ local fullname = resolvers.findfile(filename) or ""
+ if fullname == "" then
+ return {
+ filename = "not found: " .. filename,
+ glyphs = { }
+ }
+ else
+ return {
+ filename = fullname,
+ format = "opentype",
+ glyphs = get_shapes(fullname) or { },
+ units = 1000,
+ }
+ end
+end
+
+
function readers.getinfo(filename)
local data = read(resolvers.findfile(filename),infoparser)
if data then
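
Note on the reworked decrypt helper: the same routine now serves the eexec encrypted private part (initial key 55665) and the individual charstrings and subroutines (initial key 4330), dropping the first seed (here 4) plain bytes. A standalone sketch of the algorithm, assuming bit32 is available as in the module itself:

local byte, char, gsub, sub = string.byte, string.char, string.gsub, string.sub
local bxor, rshift = bit32.bxor, bit32.rshift

local function decrypt(binary,initial,seed)
    local r = initial
    binary = gsub(binary,".",function(c)
        local cipher = byte(c)
        local plain  = bxor(cipher,rshift(r,8))
        r = ((cipher + r) * 52845 + 22719) % 65536
        return char(plain)
    end)
    return sub(binary,seed+1) -- drop the random lead-in bytes
end

-- decrypt(eexecstream,55665,4) -- hypothetical eexec blob, as in loadpfbvector
-- decrypt(charstream,4330,4)   -- as in setroutine and setshapes
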
diff --git a/tex/context/base/mkiv/font-otr.lua b/tex/context/base/mkiv/font-otr.lua
index 7c81285c1..9cdbc3df0 100644
--- a/tex/context/base/mkiv/font-otr.lua
+++ b/tex/context/base/mkiv/font-otr.lua
@@ -1747,118 +1747,6 @@ function readers.math(f,fontdata,specification)
end
end
--- Goodie. A sequence instead of segments costs a bit more memory, some 300K on a
--- dejavu serif and about the same on a pagella regular.
-
-local function packoutlines(data,makesequence)
- local subfonts = data.subfonts
- if subfonts then
- for i=1,#subfonts do
- packoutlines(subfonts[i],makesequence)
- end
- return
- end
- local common = data.segments
- if common then
- return
- end
- local glyphs = data.glyphs
- if not glyphs then
- return
- end
- if makesequence then
- for index=1,#glyphs do
- local glyph = glyphs[index]
- local segments = glyph.segments
- if segments then
- local sequence = { }
- local nofsequence = 0
- for i=1,#segments do
- local segment = segments[i]
- local nofsegment = #segment
- nofsequence = nofsequence + 1
- sequence[nofsequence] = segment[nofsegment]
- for i=1,nofsegment-1 do
- nofsequence = nofsequence + 1
- sequence[nofsequence] = segment[i]
- end
- end
- glyph.sequence = sequence
- glyph.segments = nil
- end
- end
- else
- local hash = { }
- local common = { }
- local reverse = { }
- local last = 0
- for index=1,#glyphs do
- local segments = glyphs[index].segments
- if segments then
- for i=1,#segments do
- local h = concat(segments[i]," ")
- hash[h] = (hash[h] or 0) + 1
- end
- end
- end
- for index=1,#glyphs do
- local segments = glyphs[index].segments
- if segments then
- for i=1,#segments do
- local segment = segments[i]
- local h = concat(segment," ")
- if hash[h] > 1 then -- minimal one shared in order to hash
- local idx = reverse[h]
- if not idx then
- last = last + 1
- reverse[h] = last
- common[last] = segment
- idx = last
- end
- segments[i] = idx
- end
- end
- end
- end
- if last > 0 then
- data.segments = common
- end
- end
-end
-
-local function unpackoutlines(data)
- local subfonts = data.subfonts
- if subfonts then
- for i=1,#subfonts do
- unpackoutlines(subfonts[i])
- end
- return
- end
- local common = data.segments
- if not common then
- return
- end
- local glyphs = data.glyphs
- if not glyphs then
- return
- end
- for index=1,#glyphs do
- local segments = glyphs[index].segments
- if segments then
- for i=1,#segments do
- local c = common[segments[i]]
- if c then
- segments[i] = c
- end
- end
- end
- end
- data.segments = nil
-end
-
-otf.packoutlines = packoutlines
-otf.unpackoutlines = unpackoutlines
-
-- Now comes the loader. The order of reading these matters as we need to know
-- some properties in order to read following tables. When details is true we also
-- initialize the glyphs data.
@@ -2152,6 +2040,15 @@ function readers.loadshapes(filename,n)
shapes = true,
subfont = n,
}
+ if fontdata then
+ -- easier on luajit but still we can hit the 64 K stack constants issue
+ for k, v in next, fontdata.glyphs do
+ v.class = nil
+ v.index = nil
+ v.math = nil
+ -- v.name = nil
+ end
+ end
return fontdata and {
-- version = 0.123 -- todo
filename = filename,
@@ -2302,63 +2199,3 @@ function readers.extend(fontdata)
end
end
end
-
--- for now .. this will move to a context specific file
-
-if fonts.hashes then
-
- local identifiers = fonts.hashes.identifiers
- local loadshapes = readers.loadshapes
-
- readers.version = 0.006
- readers.cache = containers.define("fonts", "shapes", readers.version, true)
-
- -- todo: loaders per format
-
- local function load(filename,sub)
- local base = file.basename(filename)
- local name = file.removesuffix(base)
- local kind = file.suffix(filename)
- local attr = lfs.attributes(filename)
- local size = attr and attr.size or 0
- local time = attr and attr.modification or 0
- local sub = tonumber(sub)
- if size > 0 and (kind == "otf" or kind == "ttf" or kind == "tcc") then
- local hash = containers.cleanname(base) -- including suffix
- if sub then
- hash = hash .. "-" .. sub
- end
- data = containers.read(readers.cache,hash)
- if not data or data.time ~= time or data.size ~= size then
- data = loadshapes(filename,sub)
- if data then
- data.size = size
- data.format = data.format or (kind == "otf" and "opentype") or "truetype"
- data.time = time
- packoutlines(data)
- containers.write(readers.cache,hash,data)
- data = containers.read(readers.cache,hash) -- frees old mem
- end
- end
- unpackoutlines(data)
- else
- data = {
- filename = filename,
- size = 0,
- time = time,
- format = "unknown",
- units = 1000,
- glyphs = { }
- }
- end
- return data
- end
-
- fonts.hashes.shapes = table.setmetatableindex(function(t,k)
- local d = identifiers[k]
- local v = load(d.properties.filename,d.subindex)
- t[k] = v
- return v
- end)
-
-end
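
Note: the shape cache that used to sit at the bottom of this file moves to the new font-shp.lua below; what remains here is the raw loader, which now strips class, index and math from the glyphs before they are cached. A hedged usage sketch (the file name is only an example, and the returned table is assumed to carry the glyphs and units fields that font-shp.lua relies on):

local readers = fonts.handlers.otf.readers
local data    = readers.loadshapes("texgyrepagella-regular.otf") -- example name
if data and data.glyphs then
    -- glyphs now only carry shape related fields (segments, boundingbox, ...)
end
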
diff --git a/tex/context/base/mkiv/font-ots.lua b/tex/context/base/mkiv/font-ots.lua
index d37115603..8443a383d 100644
--- a/tex/context/base/mkiv/font-ots.lua
+++ b/tex/context/base/mkiv/font-ots.lua
@@ -3665,7 +3665,7 @@ if fontfeatures then
function otf.handlers.trigger_space_kerns(head,start,dataset,sequence,_,_,_,_,font,attr)
local features = fontfeatures[font]
- local enabled = features.spacekern == true and features.kern == true
+ local enabled = features and features.spacekern and features.kern
if enabled then
setspacekerns(font,sequence)
end
@@ -3677,7 +3677,7 @@ else -- generic (no hashes)
function otf.handlers.trigger_space_kerns(head,start,dataset,sequence,_,_,_,_,font,attr)
local shared = fontdata[font].shared
local features = shared and shared.features
- local enabled = features and features.spacekern == true and features.kern == true
+ local enabled = features and features.spacekern and features.kern
if enabled then
setspacekerns(font,sequence)
end
@@ -3739,7 +3739,7 @@ local function spaceinitializer(tfmdata,value) -- attr
if kern then
if feat then
for script, languages in next, kern do
- local f = feat[k]
+ local f = feat[script]
if f then
for l in next, languages do
f[l] = true
@@ -3759,7 +3759,7 @@ local function spaceinitializer(tfmdata,value) -- attr
if kerns then
for k, v in next, kerns do
if type(v) == "table" then
- right[k] = v[3] -- needs checking
+ right[k] = v[1][3]
else
right[k] = v
end
@@ -3769,7 +3769,7 @@ local function spaceinitializer(tfmdata,value) -- attr
local kern = v[32]
if kern then
if type(kern) == "table" then
- left[k] = kern[3] -- needs checking
+ left[k] = kern[1][3]
else
left[k] = kern
end
diff --git a/tex/context/base/mkiv/font-shp.lua b/tex/context/base/mkiv/font-shp.lua
new file mode 100644
index 000000000..92ff70127
--- /dev/null
+++ b/tex/context/base/mkiv/font-shp.lua
@@ -0,0 +1,193 @@
+if not modules then modules = { } end modules ['font-shp'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local concat = table.concat
+local load, tonumber = load, tonumber
+
+local otf = fonts.handlers.otf
+local afm = fonts.handlers.afm
+
+local hashes = fonts.hashes
+local identifiers = hashes.identifiers
+
+local version = 0.006
+local cache = containers.define("fonts", "shapes", version, true)
+
+-- shapes (can become a separate file at some point)
+
+local function packoutlines(data,makesequence)
+ local subfonts = data.subfonts
+ if subfonts then
+ for i=1,#subfonts do
+ packoutlines(subfonts[i],makesequence)
+ end
+ return
+ end
+ local common = data.segments
+ if common then
+ return
+ end
+ local glyphs = data.glyphs
+ if not glyphs then
+ return
+ end
+ if makesequence then
+ for index=1,#glyphs do
+ local glyph = glyphs[index]
+ local segments = glyph.segments
+ if segments then
+ local sequence = { }
+ local nofsequence = 0
+ for i=1,#segments do
+ local segment = segments[i]
+ local nofsegment = #segment
+ nofsequence = nofsequence + 1
+ sequence[nofsequence] = segment[nofsegment]
+ for i=1,nofsegment-1 do
+ nofsequence = nofsequence + 1
+ sequence[nofsequence] = segment[i]
+ end
+ end
+ glyph.sequence = sequence
+ glyph.segments = nil
+ end
+ end
+ else
+ local hash = { }
+ local common = { }
+ local reverse = { }
+ local last = 0
+ for index=1,#glyphs do
+ local segments = glyphs[index].segments
+ if segments then
+ for i=1,#segments do
+ local h = concat(segments[i]," ")
+ hash[h] = (hash[h] or 0) + 1
+ end
+ end
+ end
+ for index=1,#glyphs do
+ local segments = glyphs[index].segments
+ if segments then
+ for i=1,#segments do
+ local segment = segments[i]
+ local h = concat(segment," ")
+ if hash[h] > 1 then -- minimal one shared in order to hash
+ local idx = reverse[h]
+ if not idx then
+ last = last + 1
+ reverse[h] = last
+ common[last] = segment
+ idx = last
+ end
+ segments[i] = idx
+ end
+ end
+ end
+ end
+ if last > 0 then
+ data.segments = common
+ end
+ end
+end
+
+local function unpackoutlines(data)
+ local subfonts = data.subfonts
+ if subfonts then
+ for i=1,#subfonts do
+ unpackoutlines(subfonts[i])
+ end
+ return
+ end
+ local common = data.segments
+ if not common then
+ return
+ end
+ local glyphs = data.glyphs
+ if not glyphs then
+ return
+ end
+ for index=1,#glyphs do
+ local segments = glyphs[index].segments
+ if segments then
+ for i=1,#segments do
+ local c = common[segments[i]]
+ if c then
+ segments[i] = c
+ end
+ end
+ end
+ end
+ data.segments = nil
+end
+
+-- todo: loaders per format
+
+local function load(filename,sub)
+ local base = file.basename(filename)
+ local name = file.removesuffix(base)
+ local kind = file.suffix(filename)
+ local attr = lfs.attributes(filename)
+ local size = attr and attr.size or 0
+ local time = attr and attr.modification or 0
+ local sub = tonumber(sub)
+
+ -- fonts.formats
+
+ if size > 0 and (kind == "otf" or kind == "ttf" or kind == "tcc") then
+ local hash = containers.cleanname(base) -- including suffix
+ if sub then
+ hash = hash .. "-" .. sub
+ end
+ data = containers.read(cache,hash)
+ if not data or data.time ~= time or data.size ~= size then
+ data = otf.readers.loadshapes(filename,sub)
+ if data then
+ data.size = size
+ data.format = data.format or (kind == "otf" and "opentype") or "truetype"
+ data.time = time
+ packoutlines(data)
+ containers.write(cache,hash,data)
+ data = containers.read(cache,hash) -- frees old mem
+ end
+ end
+ unpackoutlines(data)
+ elseif size > 0 and (kind == "pfb") then
+ local hash = containers.cleanname(base) -- including suffix
+ data = containers.read(cache,hash)
+ if not data or data.time ~= time or data.size ~= size then
+ data = afm.readers.loadshapes(filename)
+ if data then
+ data.size = size
+ data.format = "type1"
+ data.time = time
+ packoutlines(data)
+ containers.write(cache,hash,data)
+ data = containers.read(cache,hash) -- frees old mem
+ end
+ end
+ unpackoutlines(data)
+ else
+ data = {
+ filename = filename,
+ size = 0,
+ time = time,
+ format = "unknown",
+ units = 1000,
+ glyphs = { }
+ }
+ end
+ return data
+end
+
+hashes.shapes = table.setmetatableindex(function(t,k)
+ local d = identifiers[k]
+ local v = load(d.properties.filename,d.subindex)
+ t[k] = v
+ return v
+end)
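
Note: the new font-shp.lua recreates the caching layer removed from font-otr.lua: outlines are packed (shared segments end up hashed in data.segments) before they are written to the container and unpacked again after reading, and pfb shapes now go through the afm reader. Access is through the lazy hash, as meta-imp-outlines.mkiv does; a minimal sketch:

local fontid    = font.current()
local shapedata = fonts.hashes.shapes[fontid] -- loads and caches on first access
local glyphs    = shapedata.glyphs or { }
-- otf/ttf data reports format "opentype"/"truetype", pfb data "type1"
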
diff --git a/tex/context/base/mkiv/luat-mac.lua b/tex/context/base/mkiv/luat-mac.lua
index 4274fe9f9..3f1fe6751 100644
--- a/tex/context/base/mkiv/luat-mac.lua
+++ b/tex/context/base/mkiv/luat-mac.lua
@@ -145,9 +145,8 @@ local grammar = { "converter",
* stopcode
* poplocal,
texbody = (
-leadingcomment -- new per 2015-03-03 (ugly)
-+
- V("definition")
+ leadingcomment -- new per 2015-03-03 (ugly)
+ + V("definition")
+ identifier
+ V("braced")
+ (1 - stopcode)
diff --git a/tex/context/base/mkiv/math-map.lua b/tex/context/base/mkiv/math-map.lua
index 4eb76ac8c..cf9353e95 100644
--- a/tex/context/base/mkiv/math-map.lua
+++ b/tex/context/base/mkiv/math-map.lua
@@ -34,7 +34,7 @@ if not modules then modules = { } end modules ['math-map'] = {
local type, next = type, next
local floor, div = math.floor, math.div
-local merged = table.merged
+local merged, sortedhash = table.merged, table.sortedhash
local extract = bit32.extract
local allocate = utilities.storage.allocate
@@ -557,10 +557,10 @@ mathematics.mapremap = mathremap
local boldmap = allocate { }
mathematics.boldmap = boldmap
--- all math (a bit of redundancy here)
+-- all math (a bit of redundancy here) (sorted for tracing)
-for alphabet, styles in next, alphabets do -- per 9/6/2011 we also have attr for missing
- for style, data in next, styles do
+for alphabet, styles in sortedhash(alphabets) do -- per 9/6/2011 we also have attr for missing
+ for style, data in sortedhash(styles) do
-- let's keep the long names (for tracing)
local n = #mathremap + 1
local d = {
diff --git a/tex/context/base/mkiv/meta-imp-outlines.mkiv b/tex/context/base/mkiv/meta-imp-outlines.mkiv
index ab6fcdd3d..199dadb32 100644
--- a/tex/context/base/mkiv/meta-imp-outlines.mkiv
+++ b/tex/context/base/mkiv/meta-imp-outlines.mkiv
@@ -54,7 +54,7 @@ function metapost.showglyph(specification)
local fontid = font.current()
local shapedata = fonts.hashes.shapes[fontid] -- by index
local chardata = fonts.hashes.characters[fontid] -- by unicode
- local shapeglyphs = shapedata.glyphs
+ local shapeglyphs = shapedata.glyphs or { }
local character = validstring(specification.character)
local index = validstring(specification.index)
local alternative = validstring(specification.alternative)
@@ -181,6 +181,8 @@ end
\usemodule[art-01]
+% \definedfont[lt55476.afm]
+
\startcombination[3*1]
{\ruledhbox{\startMPcode draw textext("\showshape[character=a]") ; \stopMPcode}} {}
{\ruledhbox{\startMPcode draw textext("\showshape[character=x]") ; \stopMPcode}} {}
diff --git a/tex/context/base/mkiv/meta-tex.lua b/tex/context/base/mkiv/meta-tex.lua
index 1008e45c0..9c5a2186d 100644
--- a/tex/context/base/mkiv/meta-tex.lua
+++ b/tex/context/base/mkiv/meta-tex.lua
@@ -187,9 +187,9 @@ function mp.format(fmt,str)
mpprint(f_textext(formatters[fmt](metapost.untagvariable(str,false))))
end
-function mp.formatted(fmt,num) -- svformat
+function mp.formatted(fmt,...) -- svformat
fmt = lpegmatch(cleaner,fmt)
- mpprint(f_textext(formatters[fmt](tonumber(num) or num)))
+ mpprint(f_textext(formatters[fmt](...)))
end
function mp.graphformat(fmt,num) -- nvformat
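
Note: mp.formatted now hands all arguments to the formatter instead of a single number, so one template can carry several values (the cleaner still turns @ into %). A hedged sketch of the Lua side, to be called during a MetaPost run; the exact output is approximate:

mp.formatted("@0.3f versus @0.3f", 1.23456, 6.54321)
-- roughly prints textext("1.235 versus 6.543") into the MetaPost stream
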
diff --git a/tex/context/base/mkiv/mlib-lua.lua b/tex/context/base/mkiv/mlib-lua.lua
index 7b2c67220..93ae74244 100644
--- a/tex/context/base/mkiv/mlib-lua.lua
+++ b/tex/context/base/mkiv/mlib-lua.lua
@@ -412,7 +412,6 @@ function mp.tb_dimensions(category,name)
mptriplet(w/factor,h/factor,d/factor)
end
-
function mp.report(a,b)
if b then
report_message("%s : %s",a,b)
diff --git a/tex/context/base/mkiv/mlib-pps.lua b/tex/context/base/mkiv/mlib-pps.lua
index f72061372..8d200ac63 100644
--- a/tex/context/base/mkiv/mlib-pps.lua
+++ b/tex/context/base/mkiv/mlib-pps.lua
@@ -1160,16 +1160,17 @@ local function sh_process(object,prescript,before,after)
local sh_type = prescript.sh_type
if sh_type then
nofshades = nofshades + 1
- local domain = lpegmatch(domainsplitter,prescript.sh_domain or "0 1")
- local centera = lpegmatch(centersplitter,prescript.sh_center_a or "0 0")
- local centerb = lpegmatch(centersplitter,prescript.sh_center_b or "0 0")
+ local domain = lpegmatch(domainsplitter,prescript.sh_domain or "0 1")
+ local centera = lpegmatch(centersplitter,prescript.sh_center_a or "0 0")
+ local centerb = lpegmatch(centersplitter,prescript.sh_center_b or "0 0")
+ local transform = toboolean(prescript.sh_transform or "yes",true)
-- compensation for scaling
local sx = 1
local sy = 1
local sr = 1
local dx = 0
local dy = 0
- if true then
+ if transform then
local first = lpegmatch(coordinatesplitter,prescript.sh_first or "0 0")
local setx = lpegmatch(coordinatesplitter,prescript.sh_set_x or "0 0")
local sety = lpegmatch(coordinatesplitter,prescript.sh_set_y or "0 0")
diff --git a/tex/context/base/mkiv/mtx-context-xml.tex b/tex/context/base/mkiv/mtx-context-xml.tex
index 8e4d229f5..f8bfeef3a 100644
--- a/tex/context/base/mkiv/mtx-context-xml.tex
+++ b/tex/context/base/mkiv/mtx-context-xml.tex
@@ -18,12 +18,14 @@
% usage: context --extra=xml [options] list-of-files
%
% --analyze : show elements and characters
+% --template : also export template
% --topspace=dimension : distance above first line
% --backspace=dimension : distance before left margin
% --bodyfont=list : additional bodyfont settings
% --paperformat=spec : paper*print or paperxprint
%
% context --extra=xml --analyze path::i-context.xml
+% context --extra=xml --analyze --template path::i-context.xml
% context --extra=xml --analyze selfautoparent:texmf-context/tex/context/interface/mkiv/i-*.xml
% end help
@@ -52,8 +54,10 @@
\starttext
\startluacode
- local files = document.files
- local pattern = document.arguments.pattern or (#files == 1 and files[1])
+ local files = document.files
+ local pattern = document.arguments.pattern or (#files == 1 and files[1])
+ local analyze = document.arguments.analyze
+ local template = document.arguments.template
if pattern then
files = dir.glob(pattern)
@@ -63,12 +67,15 @@
end
if #files > 0 then
- if document.arguments.analyze then
- moduledata.xml.analyzers.structure (files)
+ if analyze then
+ moduledata.xml.analyzers.structure(files)
context.page()
moduledata.xml.analyzers.characters(files)
context.page()
moduledata.xml.analyzers.entities(files)
+ if template then
+ moduledata.xml.analyzers.allsetups(files,type(template) == "string" and template or nil)
+ end
else
context("no action given")
end
diff --git a/tex/context/base/mkiv/mult-fun.lua b/tex/context/base/mkiv/mult-fun.lua
index c6d321949..5996b9ac6 100644
--- a/tex/context/base/mkiv/mult-fun.lua
+++ b/tex/context/base/mkiv/mult-fun.lua
@@ -24,7 +24,7 @@ return {
"invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh",
"zmod",
"paired", "tripled",
- "unitcircle", "fulldiamond", "unitdiamond", "fullsquare",
+ "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "unittriangle", "fulltriangle",
-- "halfcircle", "quartercircle",
"llcircle", "lrcircle", "urcircle", "ulcircle",
"tcircle", "bcircle", "lcircle", "rcircle",
@@ -45,8 +45,10 @@ return {
"withshade", "withcircularshade", "withlinearshade", -- old but kept
"defineshade", "shaded",
-- "withshading", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor",
- "shadedinto", "withshadecolors", "withshadedomain", "withshademethod", "withshadefactor", "withshadevector",
- "withshadecenter", "withshadedirection", "withshadestep", "withshadefraction",
+ "shadedinto", "withshadecolors",
+ "withshadedomain", "withshademethod", "withshadefactor", "withshadevector",
+ "withshadecenter", "withshadedirection", "withshaderadius", "withshadetransform",
+ "withshadestep", "withshadefraction",
"cmyk", "spotcolor", "multitonecolor", "namedcolor",
"drawfill", "undrawfill",
"inverted", "uncolored", "softened", "grayed", "greyed",
diff --git a/tex/context/base/mkiv/node-ini.lua b/tex/context/base/mkiv/node-ini.lua
index f8720f717..1a9d141f4 100644
--- a/tex/context/base/mkiv/node-ini.lua
+++ b/tex/context/base/mkiv/node-ini.lua
@@ -248,6 +248,7 @@ local accentcodes = mark(getsubtypes("accent"))
-- [1] = "left",
-- [2] = "middle",
-- [3] = "right",
+-- [4] = "no",
-- }
local fencecodes = mark(getsubtypes("fence"))
@@ -274,6 +275,18 @@ local usercodes = allocate {
[116] = "tokens" -- t
}
+local noadoptions = allocate {
+ set = 0x08,
+ unused_1 = 0x00 + 0x08,
+ unused_2 = 0x01 + 0x08,
+ axis = 0x02 + 0x08,
+ no_axis = 0x04 + 0x08,
+ exact = 0x10 + 0x08,
+ left = 0x11 + 0x08,
+ middle = 0x12 + 0x08,
+ right = 0x14 + 0x08,
+}
+
skipcodes = allocate(swapped(skipcodes,skipcodes))
boundarycodes = allocate(swapped(boundarycodes,boundarycodes))
noadcodes = allocate(swapped(noadcodes,noadcodes))
@@ -293,6 +306,7 @@ fencecodes = allocate(swapped(fencecodes,fencecodes))
rulecodes = allocate(swapped(rulecodes,rulecodes))
leadercodes = allocate(swapped(leadercodes,leadercodes))
usercodes = allocate(swapped(usercodes,usercodes))
+noadoptions = allocate(swapped(noadoptions,noadoptions))
nodes.skipcodes = skipcodes
nodes.boundarycodes = boundarycodes
@@ -313,6 +327,7 @@ nodes.fencecodes = fencecodes
nodes.rulecodes = rulecodes
nodes.leadercodes = leadercodes
nodes.usercodes = usercodes
+nodes.noadoptions = noadoptions
nodes.gluecodes = skipcodes -- more official
nodes.whatsitcodes = whatcodes -- more official
@@ -327,25 +342,38 @@ kerncodes.kerning = kerncodes.fontkern
kerncodes.italiccorrection = kerncodes.italiccorrection or 1 -- new
nodes.codes = allocate { -- mostly for listing
- glue = skipcodes,
- boundary = boundarycodes,
- noad = noadcodes,
- node = nodecodes,
- hlist = listcodes,
- vlist = listcodes,
- glyph = glyphcodes,
- kern = kerncodes,
- penalty = penaltycodes,
- math = mathnodes,
- fill = fillcodes,
- margin = margincodes,
- disc = disccodes,
- whatsit = whatcodes,
- accent = accentcodes,
- fence = fencecodes,
- rule = rulecodes,
- leader = leadercodes,
- user = usercodes,
+ glue = skipcodes,
+ boundary = boundarycodes,
+ noad = noadcodes,
+ node = nodecodes,
+ hlist = listcodes,
+ vlist = listcodes,
+ glyph = glyphcodes,
+ kern = kerncodes,
+ penalty = penaltycodes,
+ math = mathnodes,
+ fill = fillcodes,
+ margin = margincodes,
+ disc = disccodes,
+ whatsit = whatcodes,
+ accent = accentcodes,
+ fence = fencecodes,
+ rule = rulecodes,
+ leader = leadercodes,
+ user = usercodes,
+ noadoptions = noadoptions,
+}
+
+nodes.noadoptions = {
+ set = 0x08,
+ unused_1 = 0x00 + 0x08,
+ unused_2 = 0x01 + 0x08,
+ axis = 0x02 + 0x08,
+ no_axis = 0x04 + 0x08,
+ exact = 0x10 + 0x08,
+ left = 0x11 + 0x08,
+ middle = 0x12 + 0x08,
+ right = 0x14 + 0x08,
}
local report_codes = logs.reporter("nodes","codes")
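
Note: the new noadoptions table mirrors the fence/delimiter option bits (the 0x08 set bit plus axis, no_axis, exact, left, middle, right) and, like the other code tables, is also registered in nodes.codes. A small sketch of the resulting values:

local noadoptions = nodes.noadoptions
-- the set bit 0x08 is always included; axis combines 0x02 with it
print(noadoptions.axis)  -- 10
print(noadoptions.exact) -- 24 (0x10 + 0x08)
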
diff --git a/tex/context/base/mkiv/status-files.pdf b/tex/context/base/mkiv/status-files.pdf
index 3192f32d8..10ad1bcfd 100644
--- a/tex/context/base/mkiv/status-files.pdf
+++ b/tex/context/base/mkiv/status-files.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/status-lua.pdf b/tex/context/base/mkiv/status-lua.pdf
index 4dc611165..05f1666a9 100644
--- a/tex/context/base/mkiv/status-lua.pdf
+++ b/tex/context/base/mkiv/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/trac-inf.lua b/tex/context/base/mkiv/trac-inf.lua
index a1d7fb0a0..12a4f646f 100644
--- a/tex/context/base/mkiv/trac-inf.lua
+++ b/tex/context/base/mkiv/trac-inf.lua
@@ -61,12 +61,13 @@ local function stoptiming(instance)
timer.timing = it - 1
else
local starttime = timer.starttime
- if starttime then
- local stoptime = clock()
- local loadtime = stoptime - starttime
- timer.stoptime = stoptime
- timer.loadtime = timer.loadtime + loadtime
- timer.timing = 0
+ if starttime and starttime > 0 then
+ local stoptime = clock()
+ local loadtime = stoptime - starttime
+ timer.stoptime = stoptime
+ timer.loadtime = timer.loadtime + loadtime
+ timer.timing = 0
+ timer.starttime = 0
return loadtime
end
end
@@ -183,7 +184,7 @@ end
function statistics.runtime()
stoptiming(statistics)
- stoptiming(statistics) -- somehow we can start the timer twice, but where
+ -- stoptiming(statistics) -- somehow we can start the timer twice, but where
return statistics.formatruntime(elapsedtime(statistics))
end